diff --git ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
index 3805f9d..01a00d8 100644
--- ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
+++ ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
@@ -108,6 +108,8 @@ public boolean accept(File filePath) {
}
private List<String> templatePaths = new ArrayList<String>();
+
+ private String hiveRootDirectory;
private String outputDirectory;
@@ -185,6 +187,18 @@ public String getTemplate() {
return template;
}
+ public void setHiveRootDirectory(File hiveRootDirectory) {
+ try {
+ this.hiveRootDirectory = hiveRootDirectory.getCanonicalPath();
+ } catch (IOException ioe) {
+ throw new BuildException(ioe);
+ }
+ }
+
+ public String getHiveRootDirectory() {
+ return hiveRootDirectory;
+ }
+
public void setTemplatePath(String templatePath) throws Exception {
templatePaths.clear();
for (String relativePath : templatePath.split(",")) {
@@ -302,14 +316,15 @@ public void execute() throws BuildException {
List<File> qFiles = new ArrayList<File>();
HashMap<String, String> qFilesMap = new HashMap<String, String>();
+ File hiveRootDir = null;
+ File queryDir = null;
File outDir = null;
File resultsDir = null;
File logDir = null;
try {
- File inpDir = null;
if (queryDirectory != null) {
- inpDir = new File(queryDirectory);
+ queryDir = new File(queryDirectory);
}
if (queryFile != null && !queryFile.equals("")) {
@@ -318,31 +333,37 @@ public void execute() throws BuildException {
if (includeOnly != null && !includeOnly.contains(qFile)) {
continue;
}
- if (null != inpDir) {
- qFiles.add(new File(inpDir, qFile));
+ if (null != queryDir) {
+ qFiles.add(new File(queryDir, qFile));
} else {
qFiles.add(new File(qFile));
}
}
} else if (queryFileRegex != null && !queryFileRegex.equals("")) {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(
new QFileRegexFilter(queryFileRegex, includeOnly))));
} else if (runDisabled != null && runDisabled.equals("true")) {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(new DisabledQFileFilter(includeOnly))));
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(new DisabledQFileFilter(includeOnly))));
} else {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileFilter(includeOnly))));
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(new QFileFilter(includeOnly))));
}
if (excludeQueryFile != null && !excludeQueryFile.equals("")) {
// Exclude specified query files, comma separated
for (String qFile : excludeQueryFile.split(",")) {
- if (null != inpDir) {
- qFiles.remove(new File(inpDir, qFile));
+ if (null != queryDir) {
+ qFiles.remove(new File(queryDir, qFile));
} else {
qFiles.remove(new File(qFile));
}
}
}
+
+ hiveRootDir = new File(hiveRootDirectory);
+ if (!hiveRootDir.exists()) {
+ throw new BuildException("Hive Root Directory "
+ + hiveRootDir.getCanonicalPath() + " does not exist");
+ }
Collections.sort(qFiles);
for (File qFile : qFiles) {
@@ -397,6 +418,8 @@ public void execute() throws BuildException {
// For each of the qFiles generate the test
VelocityContext ctx = new VelocityContext();
ctx.put("className", className);
+ ctx.put("hiveRootDir", getEscapedCanonicalPath(hiveRootDir));
+ ctx.put("queryDir", getEscapedCanonicalPath(queryDir));
ctx.put("qfiles", qFiles);
ctx.put("qfilesMap", qFilesMap);
ctx.put("resultsDir", getEscapedCanonicalPath(resultsDir));
diff --git bin/ext/beeline.sh bin/ext/beeline.sh
index 4195d3d..6c0435d 100644
--- bin/ext/beeline.sh
+++ bin/ext/beeline.sh
@@ -18,13 +18,12 @@ THISSERVICE=beeline
export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
beeline () {
-
- CLASS=org.apache.hive.jdbc.beeline.HiveBeeline;
+ CLASS=org.apache.hive.beeline.BeeLine;
execHiveCmd $CLASS "$@"
}
beeline_help () {
- CLASS=org.apache.hive.jdbc.beeline.HiveBeeline;
+ CLASS=org.apache.hive.beeline.BeeLine;
execHiveCmd $CLASS "--help"
}
diff --git bin/ext/hiveserver2.sh bin/ext/hiveserver2.sh
new file mode 100644
index 0000000..b57e96b
--- /dev/null
+++ bin/ext/hiveserver2.sh
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hiveserver2
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+hiveserver2() {
+ echo "Starting HiveServer2"
+ CLASS=org.apache.hive.service.server.HiveServer2
+ if $cygwin; then
+ HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+ fi
+ JAR=${HIVE_LIB}/hive-service-*.jar
+
+ exec $HADOOP jar $JAR $CLASS "$@"
+}
+
+hiveserver2_help() {
+ hiveserver2 -h
+}
+
diff --git bin/hiveserver2 bin/hiveserver2
new file mode 100644
index 0000000..c066ffd
--- /dev/null
+++ bin/hiveserver2
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hive --service hiveserver2 "$@"
diff --git build-common.xml build-common.xml
index e68ecea..e565e2e 100644
--- build-common.xml
+++ build-common.xml
@@ -38,7 +38,9 @@
+
+
@@ -77,6 +79,7 @@
+
@@ -97,6 +100,7 @@
+
+
+
@@ -226,6 +232,7 @@
+
@@ -280,7 +287,7 @@
+
+
+
+
+
+
@@ -413,8 +426,10 @@
+
+
@@ -422,6 +437,7 @@
+
@@ -440,9 +456,11 @@
+
+
@@ -486,6 +504,23 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git build.properties build.properties
index 2d293a6..b0df291 100644
--- build.properties
+++ build.properties
@@ -72,8 +72,8 @@ jsp.test.jar=${hadoop.root}/lib/jetty-ext/jsp-api.jar
common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
# module names needed for build process
-iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,hwi,hbase-handler,pdk,builtins
-iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,hwi,hbase-handler,pdk,builtins
+iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins
+iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins
iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,metastore,odbc,serde,service
iterate.hive.thrift=ql,service,metastore,serde
iterate.hive.protobuf=ql
@@ -92,7 +92,16 @@ test.junit.timeout=43200000
# Use this property to selectively disable tests from the command line:
# ant test -Dtest.junit.exclude="**/TestCliDriver.class"
# ant test -Dtest.junit.exclude="**/Test*CliDriver.class,**/TestPartitions.class"
-test.junit.exclude=
+test.junit.exclude="**/TestBeeLineDriver.class, **/TestHiveServer2Concurrency.class"
+test.continue.on.failure=false
+
+test.submodule.exclude=
+test.junit.maxmemory=512m
+
+test.concurrency.num.threads=1
+#test.beelinepositive.exclude=add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q
+
+
#
# Ivy Properties
@@ -108,7 +117,7 @@ ivy.changingPattern=.*SNAPSHOT
ivy.publish.pattern=[artifact]-[revision].[ext]
ivy.artifact.retrieve.pattern=[conf]/[artifact]-[revision](-[classifier]).[ext]
ivysettings.xml=${ivy.conf.dir}/ivysettings.xml
-ivyresolvelog=download-only
+ivyresolvelog=default
ivy.mvn.repo=http://repo2.maven.org/maven2
ivy_repo_url=${ivy.mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar
hive.ivy.org=org.apache.hive
diff --git build.xml build.xml
index b5c69d3..9e656d6 100644
--- build.xml
+++ build.xml
@@ -138,6 +138,7 @@
+
@@ -391,16 +392,18 @@
+
+
+
+
+
+
-
-
-
-
@@ -447,6 +450,9 @@
+
+
+
@@ -460,14 +466,15 @@
+
+
-
@@ -660,6 +667,7 @@
+
@@ -884,89 +892,91 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+ description="Resolve, Retrieve Ivy-managed artifacts for docs configuration">
+
+
+
-
-
+
+
+
-
-
+
+
@@ -1169,6 +1185,12 @@
+
+
+
+
+
+
@@ -1187,18 +1209,15 @@
-
-
-
-
-
-
+
+
+
@@ -1240,6 +1259,14 @@
output.file="${mvn.pom.dir}/hive-anttasks-${version}.pom.asc"
gpg.passphrase="${gpg.passphrase}"/>
+
+
@@ -1264,6 +1291,14 @@
output.file="${mvn.pom.dir}/hive-contrib-${version}.pom.asc"
gpg.passphrase="${gpg.passphrase}"/>
+
+
@@ -1296,14 +1331,6 @@
output.file="${mvn.pom.dir}/hive-metastore-${version}.pom.asc"
gpg.passphrase="${gpg.passphrase}"/>
-
-
@@ -1312,14 +1339,6 @@
output.file="${mvn.pom.dir}/hive-pdk-${version}.pom.asc"
gpg.passphrase="${gpg.passphrase}"/>
-
-
diff --git cli/build.xml cli/build.xml
index 6e70d5f..092a68b 100755
--- cli/build.xml
+++ cli/build.xml
@@ -40,6 +40,11 @@ to call at top-level: ant deploy-contrib compile-core-test
+
+
+
+
+
diff --git cli/ivy.xml cli/ivy.xml
index 0d1c64a..4bf543e 100644
--- cli/ivy.xml
+++ cli/ivy.xml
@@ -30,6 +30,7 @@
+
diff --git common/.gitignore common/.gitignore
new file mode 100644
index 0000000..fa75d26
--- /dev/null
+++ common/.gitignore
@@ -0,0 +1 @@
+src/gen/org/apache/hive/common/package-info.java
diff --git common/build.xml common/build.xml
index 24ad8f5..731f26e 100755
--- common/build.xml
+++ common/build.xml
@@ -29,6 +29,11 @@ to call at top-level: ant deploy-contrib compile-core-test
+
+
+
+
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+  private static final Map<String, ConfVars> vars = new HashMap<String, ConfVars>();
+  private final List<String> restrictList = new ArrayList<String>();
static {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
@@ -146,6 +149,16 @@
};
/**
+ * The conf variables that depend on the current user
+ */
+ public static final HiveConf.ConfVars[] userVars = {
+ HiveConf.ConfVars.SCRATCHDIR,
+ HiveConf.ConfVars.LOCALSCRATCHDIR,
+ HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR,
+ HiveConf.ConfVars.HIVEHISTORYFILELOC
+ };
+
+ /**
* ConfVars.
*
* These are the default configuration properties for Hive. Each HiveConf
@@ -688,6 +701,24 @@
HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null),
HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
+ HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5),
+ HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 100),
+
+ HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000),
+ HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""),
+
+
+ // HiveServer2 auth configuration
+ HIVE_SERVER2_AUTHENTICATION("hive.server2.authentication", "NONE"),
+ HIVE_SERVER2_KERBEROS_KEYTAB("hive.server2.authentication.kerberos.keytab", ""),
+ HIVE_SERVER2_KERBEROS_PRINCIPAL("hive.server2.authentication.kerberos.principal", ""),
+ HIVE_SERVER2_PLAIN_LDAP_URL("hive.server2.authentication.ldap.url", null),
+ HIVE_SERVER2_PLAIN_LDAP_BASEDN("hive.server2.authentication.ldap.baseDN", null),
+ HIVE_SERVER2_KERBEROS_IMPERSONATION("hive.server2.enable.impersonation", false),
+ HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS("hive.server2.custom.authentication.class", null),
+
+ HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list", null),
+
// If this is set all move tasks at the end of a multi-insert query will only begin once all
// outputs are ready
HIVE_MULTI_INSERT_MOVE_TASKS_SHARE_DEPENDENCIES(
@@ -870,6 +901,13 @@ private static synchronized InputStream getConfVarInputStream() {
return new LoopingByteArrayInputStream(confVarByteArray);
}
+ public void verifyAndSet(String name, String value) throws IllegalArgumentException {
+ if (restrictList.contains(name)) {
+ throw new IllegalArgumentException("Cann't modify " + name + " at runtime");
+ }
+ set(name, value);
+ }
+
public static int getIntVar(Configuration conf, ConfVars var) {
assert (var.valClass == Integer.class);
return conf.getInt(var.varname, var.defaultIntVal);
@@ -1057,8 +1095,18 @@ private void initialize(Class<?> cls) {
if (auxJars == null) {
auxJars = this.get(ConfVars.HIVEAUXJARS.varname);
}
+
+    // set up the list of conf vars that are not allowed to be changed at runtime
+ String restrictListStr = this.get(ConfVars.HIVE_CONF_RESTRICTED_LIST.toString());
+ if (restrictListStr != null) {
+ for (String entry : restrictListStr.split(",")) {
+ restrictList.add(entry);
+ }
+ }
+ restrictList.add(ConfVars.HIVE_CONF_RESTRICTED_LIST.toString());
}
+
/**
* Apply system properties to this object if the property name is defined in ConfVars
* and the value is non-null and not an empty string.
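[Note: the restricted-list change above makes verifyAndSet() reject runtime updates to any variable named in hive.conf.restricted.list. A minimal sketch, assuming hive-site.xml sets hive.conf.restricted.list to hive.exec.scratchdir; the variable names are illustrative:]

    HiveConf conf = new HiveConf();                        // restrictList is built in initialize()
    conf.verifyAndSet("hive.cli.print.header", "true");    // not restricted: succeeds
    try {
      conf.verifyAndSet("hive.exec.scratchdir", "/tmp/x"); // restricted: rejected
    } catch (IllegalArgumentException expected) {
      // "Cannot modify hive.exec.scratchdir at runtime"
    }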
diff --git common/src/java/org/apache/hive/common/HiveVersionAnnotation.java common/src/java/org/apache/hive/common/HiveVersionAnnotation.java
new file mode 100644
index 0000000..0060ce8
--- /dev/null
+++ common/src/java/org/apache/hive/common/HiveVersionAnnotation.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
+/**
+ * HiveVersionAnnotation.
+ *
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.PACKAGE)
+@InterfaceStability.Unstable
+public @interface HiveVersionAnnotation {
+
+ /**
+ * Get the Hive version
+   * @return the version string, eg. "0.6.3-dev"
+ */
+ String version();
+
+ /**
+ * Get the username that compiled Hive.
+ */
+ String user();
+
+ /**
+ * Get the date when Hive was compiled.
+ * @return the date in unix 'date' format
+ */
+ String date();
+
+ /**
+ * Get the url for the subversion repository.
+ */
+ String url();
+
+ /**
+ * Get the subversion revision.
+ * @return the revision number as a string (eg. "451451")
+ */
+ String revision();
+
+ /**
+ * Get the branch from which this was compiled.
+ * @return The branch name, e.g. "trunk" or "branches/branch-0.20"
+ */
+ String branch();
+
+ /**
+ * Get a checksum of the source files from which
+ * Hive was compiled.
+ * @return a string that uniquely identifies the source
+ **/
+ String srcChecksum();
+
+}
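[Note: since HiveVersionAnnotation targets the package, consumers read it back via package reflection -- HiveVersionInfo below does exactly this. A one-line sketch:]

    HiveVersionAnnotation v =
        HiveVersionAnnotation.class.getPackage().getAnnotation(HiveVersionAnnotation.class);
    String version = (v != null) ? v.version() : "Unknown";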
diff --git common/src/java/org/apache/hive/common/util/HiveStringUtils.java common/src/java/org/apache/hive/common/util/HiveStringUtils.java
new file mode 100644
index 0000000..fa995cd
--- /dev/null
+++ common/src/java/org/apache/hive/common/util/HiveStringUtils.java
@@ -0,0 +1,810 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.net.InetAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.UnknownHostException;
+import java.text.DateFormat;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
+/**
+ * HiveStringUtils
+ * General string utils
+ *
+ * Originally copied from o.a.hadoop.util.StringUtils
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class HiveStringUtils {
+
+ /**
+ * Priority of the StringUtils shutdown hook.
+ */
+ public static final int SHUTDOWN_HOOK_PRIORITY = 0;
+
+ private static final DecimalFormat decimalFormat;
+ static {
+ NumberFormat numberFormat = NumberFormat.getNumberInstance(Locale.ENGLISH);
+ decimalFormat = (DecimalFormat) numberFormat;
+ decimalFormat.applyPattern("#.##");
+ }
+
+ /**
+ * Make a string representation of the exception.
+ * @param e The exception to stringify
+ * @return A string with exception name and call stack.
+ */
+ public static String stringifyException(Throwable e) {
+ StringWriter stm = new StringWriter();
+ PrintWriter wrt = new PrintWriter(stm);
+ e.printStackTrace(wrt);
+ wrt.close();
+ return stm.toString();
+ }
+
+ /**
+   * Given a full hostname, return the word up to the first dot.
+ * @param fullHostname the full hostname
+ * @return the hostname to the first dot
+ */
+ public static String simpleHostname(String fullHostname) {
+ int offset = fullHostname.indexOf('.');
+ if (offset != -1) {
+ return fullHostname.substring(0, offset);
+ }
+ return fullHostname;
+ }
+
+ private static DecimalFormat oneDecimal = new DecimalFormat("0.0");
+
+ /**
+ * Given an integer, return a string that is in an approximate, but human
+ * readable format.
+ * It uses the bases 'k', 'm', and 'g' for 1024, 1024**2, and 1024**3.
+ * @param number the number to format
+ * @return a human readable form of the integer
+ */
+ public static String humanReadableInt(long number) {
+ long absNumber = Math.abs(number);
+ double result = number;
+ String suffix = "";
+ if (absNumber < 1024) {
+ // since no division has occurred, don't format with a decimal point
+ return String.valueOf(number);
+ } else if (absNumber < 1024 * 1024) {
+ result = number / 1024.0;
+ suffix = "k";
+ } else if (absNumber < 1024 * 1024 * 1024) {
+ result = number / (1024.0 * 1024);
+ suffix = "m";
+ } else {
+ result = number / (1024.0 * 1024 * 1024);
+ suffix = "g";
+ }
+ return oneDecimal.format(result) + suffix;
+ }
+
+ /**
+ * Format a percentage for presentation to the user.
+ * @param done the percentage to format (0.0 to 1.0)
+ * @param digits the number of digits past the decimal point
+ * @return a string representation of the percentage
+ */
+ public static String formatPercent(double done, int digits) {
+ DecimalFormat percentFormat = new DecimalFormat("0.00%");
+ double scale = Math.pow(10.0, digits+2);
+ double rounded = Math.floor(done * scale);
+ percentFormat.setDecimalSeparatorAlwaysShown(false);
+ percentFormat.setMinimumFractionDigits(digits);
+ percentFormat.setMaximumFractionDigits(digits);
+ return percentFormat.format(rounded / scale);
+ }
+
+ /**
+ * Given an array of strings, return a comma-separated list of its elements.
+ * @param strs Array of strings
+ * @return Empty string if strs.length is 0, comma separated list of strings
+ * otherwise
+ */
+
+ public static String arrayToString(String[] strs) {
+ if (strs.length == 0) { return ""; }
+ StringBuilder sbuf = new StringBuilder();
+ sbuf.append(strs[0]);
+ for (int idx = 1; idx < strs.length; idx++) {
+ sbuf.append(",");
+ sbuf.append(strs[idx]);
+ }
+ return sbuf.toString();
+ }
+
+ /**
+ * Given an array of bytes it will convert the bytes to a hex string
+ * representation of the bytes
+ * @param bytes
+ * @param start start index, inclusively
+ * @param end end index, exclusively
+ * @return hex string representation of the byte array
+ */
+ public static String byteToHexString(byte[] bytes, int start, int end) {
+ if (bytes == null) {
+ throw new IllegalArgumentException("bytes == null");
+ }
+ StringBuilder s = new StringBuilder();
+ for(int i = start; i < end; i++) {
+ s.append(String.format("%02x", bytes[i]));
+ }
+ return s.toString();
+ }
+
+ /** Same as byteToHexString(bytes, 0, bytes.length). */
+ public static String byteToHexString(byte bytes[]) {
+ return byteToHexString(bytes, 0, bytes.length);
+ }
+
+ /**
+ * Given a hexstring this will return the byte array corresponding to the
+ * string
+   * @param hex the hex String
+ * @return a byte array that is a hex string representation of the given
+ * string. The size of the byte array is therefore hex.length/2
+ */
+ public static byte[] hexStringToByte(String hex) {
+ byte[] bts = new byte[hex.length() / 2];
+ for (int i = 0; i < bts.length; i++) {
+ bts[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
+ }
+ return bts;
+ }
+ /**
+ *
+ * @param uris
+ */
+ public static String uriToString(URI[] uris){
+ if (uris == null) {
+ return null;
+ }
+ StringBuilder ret = new StringBuilder(uris[0].toString());
+ for(int i = 1; i < uris.length;i++){
+ ret.append(",");
+ ret.append(uris[i].toString());
+ }
+ return ret.toString();
+ }
+
+ /**
+ * @param str
+ * The string array to be parsed into an URI array.
+ * @return null if str is null, else the URI array
+ * equivalent to str.
+ * @throws IllegalArgumentException
+ * If any string in str violates RFC 2396.
+ */
+ public static URI[] stringToURI(String[] str){
+ if (str == null) {
+ return null;
+ }
+ URI[] uris = new URI[str.length];
+ for (int i = 0; i < str.length;i++){
+ try{
+ uris[i] = new URI(str[i]);
+ }catch(URISyntaxException ur){
+ throw new IllegalArgumentException(
+ "Failed to create uri for " + str[i], ur);
+ }
+ }
+ return uris;
+ }
+
+ /**
+ *
+ * @param str
+ */
+ public static Path[] stringToPath(String[] str){
+ if (str == null) {
+ return null;
+ }
+ Path[] p = new Path[str.length];
+ for (int i = 0; i < str.length;i++){
+ p[i] = new Path(str[i]);
+ }
+ return p;
+ }
+ /**
+ *
+ * Given a finish and start time in long milliseconds, returns a
+ * String in the format Xhrs, Ymins, Z sec, for the time difference between two times.
+   * If finish time comes before start time then negative values of X, Y and Z will be returned.
+ *
+ * @param finishTime finish time
+ * @param startTime start time
+ */
+ public static String formatTimeDiff(long finishTime, long startTime){
+ long timeDiff = finishTime - startTime;
+ return formatTime(timeDiff);
+ }
+
+ /**
+ *
+ * Given the time in long milliseconds, returns a
+ * String in the format Xhrs, Ymins, Z sec.
+ *
+ * @param timeDiff The time difference to format
+ */
+ public static String formatTime(long timeDiff){
+ StringBuilder buf = new StringBuilder();
+ long hours = timeDiff / (60*60*1000);
+ long rem = (timeDiff % (60*60*1000));
+ long minutes = rem / (60*1000);
+ rem = rem % (60*1000);
+ long seconds = rem / 1000;
+
+ if (hours != 0){
+ buf.append(hours);
+ buf.append("hrs, ");
+ }
+ if (minutes != 0){
+ buf.append(minutes);
+ buf.append("mins, ");
+ }
+    // return "0sec" if no difference
+ buf.append(seconds);
+ buf.append("sec");
+ return buf.toString();
+ }
+ /**
+ * Formats time in ms and appends difference (finishTime - startTime)
+ * as returned by formatTimeDiff().
+ * If finish time is 0, empty string is returned, if start time is 0
+ * then difference is not appended to return value.
+ * @param dateFormat date format to use
+   * @param finishTime finish time
+ * @param startTime start time
+ * @return formatted value.
+ */
+ public static String getFormattedTimeWithDiff(DateFormat dateFormat,
+ long finishTime, long startTime){
+ StringBuilder buf = new StringBuilder();
+ if (0 != finishTime) {
+ buf.append(dateFormat.format(new Date(finishTime)));
+ if (0 != startTime){
+ buf.append(" (" + formatTimeDiff(finishTime , startTime) + ")");
+ }
+ }
+ return buf.toString();
+ }
+
+ /**
+   * Returns an array of strings.
+   * @param str the comma separated string values
+   * @return the array of the comma separated string values
+ */
+ public static String[] getStrings(String str){
+    Collection<String> values = getStringCollection(str);
+ if(values.size() == 0) {
+ return null;
+ }
+ return values.toArray(new String[values.size()]);
+ }
+
+ /**
+ * Returns a collection of strings.
+   * @param str comma separated string values
+   * @return an ArrayList of string values
+   */
+  public static Collection<String> getStringCollection(String str){
+    List<String> values = new ArrayList<String>();
+ if (str == null) {
+ return values;
+ }
+ StringTokenizer tokenizer = new StringTokenizer (str,",");
+    values = new ArrayList<String>();
+ while (tokenizer.hasMoreTokens()) {
+ values.add(tokenizer.nextToken());
+ }
+ return values;
+ }
+
+ /**
+ * Splits a comma separated value String, trimming leading and trailing whitespace on each value.
+   * @param str a comma separated String with values
+ * @return a Collection of String values
+ */
+  public static Collection<String> getTrimmedStringCollection(String str){
+    return new ArrayList<String>(
+      Arrays.asList(getTrimmedStrings(str)));
+ }
+
+ /**
+ * Splits a comma separated value String, trimming leading and trailing whitespace on each value.
+   * @param str a comma separated String with values
+ * @return an array of String values
+ */
+ public static String[] getTrimmedStrings(String str){
+ if (null == str || "".equals(str.trim())) {
+ return emptyStringArray;
+ }
+
+ return str.trim().split("\\s*,\\s*");
+ }
+
+ final public static String[] emptyStringArray = {};
+ final public static char COMMA = ',';
+ final public static String COMMA_STR = ",";
+ final public static char ESCAPE_CHAR = '\\';
+
+ /**
+ * Split a string using the default separator
+ * @param str a string that may have escaped separator
+ * @return an array of strings
+ */
+ public static String[] split(String str) {
+ return split(str, ESCAPE_CHAR, COMMA);
+ }
+
+ /**
+ * Split a string using the given separator
+ * @param str a string that may have escaped separator
+ * @param escapeChar a char that be used to escape the separator
+ * @param separator a separator char
+ * @return an array of strings
+ */
+ public static String[] split(
+ String str, char escapeChar, char separator) {
+ if (str==null) {
+ return null;
+ }
+    ArrayList<String> strList = new ArrayList<String>();
+ StringBuilder split = new StringBuilder();
+ int index = 0;
+ while ((index = findNext(str, separator, escapeChar, index, split)) >= 0) {
+ ++index; // move over the separator for next search
+ strList.add(split.toString());
+ split.setLength(0); // reset the buffer
+ }
+ strList.add(split.toString());
+ // remove trailing empty split(s)
+ int last = strList.size(); // last split
+ while (--last>=0 && "".equals(strList.get(last))) {
+ strList.remove(last);
+ }
+ return strList.toArray(new String[strList.size()]);
+ }
+
+ /**
+ * Split a string using the given separator, with no escaping performed.
+ * @param str a string to be split. Note that this may not be null.
+ * @param separator a separator char
+ * @return an array of strings
+ */
+ public static String[] split(
+ String str, char separator) {
+ // String.split returns a single empty result for splitting the empty
+ // string.
+ if ("".equals(str)) {
+ return new String[]{""};
+ }
+    ArrayList<String> strList = new ArrayList<String>();
+ int startIndex = 0;
+ int nextIndex = 0;
+ while ((nextIndex = str.indexOf((int)separator, startIndex)) != -1) {
+ strList.add(str.substring(startIndex, nextIndex));
+ startIndex = nextIndex + 1;
+ }
+ strList.add(str.substring(startIndex));
+ // remove trailing empty split(s)
+ int last = strList.size(); // last split
+ while (--last>=0 && "".equals(strList.get(last))) {
+ strList.remove(last);
+ }
+ return strList.toArray(new String[strList.size()]);
+ }
+
+ /**
+ * Finds the first occurrence of the separator character ignoring the escaped
+ * separators starting from the index. Note the substring between the index
+ * and the position of the separator is passed.
+ * @param str the source string
+ * @param separator the character to find
+ * @param escapeChar character used to escape
+ * @param start from where to search
+ * @param split used to pass back the extracted string
+ */
+ public static int findNext(String str, char separator, char escapeChar,
+ int start, StringBuilder split) {
+ int numPreEscapes = 0;
+ for (int i = start; i < str.length(); i++) {
+ char curChar = str.charAt(i);
+ if (numPreEscapes == 0 && curChar == separator) { // separator
+ return i;
+ } else {
+ split.append(curChar);
+ numPreEscapes = (curChar == escapeChar)
+ ? (++numPreEscapes) % 2
+ : 0;
+ }
+ }
+ return -1;
+ }
+
+ /**
+ * Escape commas in the string using the default escape char
+ * @param str a string
+ * @return an escaped string
+ */
+ public static String escapeString(String str) {
+ return escapeString(str, ESCAPE_CHAR, COMMA);
+ }
+
+ /**
+ * Escape charToEscape in the string
+ * with the escape char escapeChar
+ *
+ * @param str string
+ * @param escapeChar escape char
+ * @param charToEscape the char to be escaped
+ * @return an escaped string
+ */
+ public static String escapeString(
+ String str, char escapeChar, char charToEscape) {
+ return escapeString(str, escapeChar, new char[] {charToEscape});
+ }
+
+ // check if the character array has the character
+ private static boolean hasChar(char[] chars, char character) {
+ for (char target : chars) {
+ if (character == target) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * @param charsToEscape array of characters to be escaped
+ */
+ public static String escapeString(String str, char escapeChar,
+ char[] charsToEscape) {
+ if (str == null) {
+ return null;
+ }
+ StringBuilder result = new StringBuilder();
+    for (int i=0; i<str.length(); i++) {
+      char curChar = str.charAt(i);
+      if (curChar == escapeChar || hasChar(charsToEscape, curChar)) {
+        // special char
+        result.append(escapeChar);
+      }
+      result.append(curChar);
+    }
+    return result.toString();
+  }
+
+  /**
+   * Unescape commas in the string using the default escape char
+   * @param str a string
+   * @return an unescaped string
+   */
+  public static String unEscapeString(String str) {
+    return unEscapeString(str, ESCAPE_CHAR, COMMA);
+  }
+
+  /**
+   * Unescape charToEscape in the string
+ * with the escape char escapeChar
+ *
+ * @param str string
+ * @param escapeChar escape char
+ * @param charToEscape the escaped char
+ * @return an unescaped string
+ */
+ public static String unEscapeString(
+ String str, char escapeChar, char charToEscape) {
+ return unEscapeString(str, escapeChar, new char[] {charToEscape});
+ }
+
+ /**
+ * @param charsToEscape array of characters to unescape
+ */
+ public static String unEscapeString(String str, char escapeChar,
+ char[] charsToEscape) {
+ if (str == null) {
+ return null;
+ }
+ StringBuilder result = new StringBuilder(str.length());
+ boolean hasPreEscape = false;
+    for (int i=0; i<str.length(); i++) {
+      char curChar = str.charAt(i);
+      if (hasPreEscape) {
+        if (curChar != escapeChar && !hasChar(charsToEscape, curChar)) {
+          // no special char
+          throw new IllegalArgumentException("Illegal escaped string " + str
+              + " unescaped " + escapeChar + " at " + (i - 1));
+        }
+        // otherwise discard the escape char
+        result.append(curChar);
+        hasPreEscape = false;
+      } else {
+        if (hasChar(charsToEscape, curChar)) {
+          throw new IllegalArgumentException("Illegal escaped string " + str
+              + " unescaped " + curChar + " at " + i);
+        } else if (curChar == escapeChar) {
+          hasPreEscape = true;
+        } else {
+          result.append(curChar);
+        }
+      }
+    }
+    if (hasPreEscape) {
+      throw new IllegalArgumentException("Illegal escaped string " + str
+          + ", not expecting " + escapeChar + " in the end.");
+    }
+    return result.toString();
+  }
+
+  /**
+   * Return a message for logging.
+   * @param prefix prefix keyword for the message
+   * @param msg content of the message
+   * @return a message for logging
+   */
+  private static String toStartupShutdownString(String prefix, String[] msg) {
+    StringBuilder b = new StringBuilder(prefix);
+    b.append("\n/************************************************************");
+    for (String s : msg) {
+      b.append("\n" + prefix + s);
+    }
+    b.append("\n************************************************************/");
+    return b.toString();
+  }
+
+  /**
+   * Print a log message for starting up and shutting down.
+   * @param clazz the class of the server
+   * @param args arguments
+   * @param LOG the target log object
+   */
+  public static void startupShutdownMessage(Class<?> clazz, String[] args,
+ final org.apache.commons.logging.Log LOG) {
+ final String hostname = getHostname();
+ final String classname = clazz.getSimpleName();
+ LOG.info(
+ toStartupShutdownString("STARTUP_MSG: ", new String[] {
+ "Starting " + classname,
+ " host = " + hostname,
+ " args = " + Arrays.asList(args),
+ " version = " + HiveVersionInfo.getVersion(),
+ " classpath = " + System.getProperty("java.class.path"),
+ " build = " + HiveVersionInfo.getUrl() + " -r "
+ + HiveVersionInfo.getRevision()
+ + "; compiled by '" + HiveVersionInfo.getUser()
+ + "' on " + HiveVersionInfo.getDate()}
+ )
+ );
+
+ ShutdownHookManager.addShutdownHook(
+ new Runnable() {
+ @Override
+ public void run() {
+ LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{
+ "Shutting down " + classname + " at " + hostname}));
+ }
+ }, SHUTDOWN_HOOK_PRIORITY);
+
+ }
+
+ /**
+ * Return hostname without throwing exception.
+ * @return hostname
+ */
+ public static String getHostname() {
+ try {return "" + InetAddress.getLocalHost();}
+ catch(UnknownHostException uhe) {return "" + uhe;}
+ }
+
+
+ /**
+ * The traditional binary prefixes, kilo, mega, ..., exa,
+ * which can be represented by a 64-bit integer.
+   * TraditionalBinaryPrefix symbols are case insensitive.
+ */
+ public static enum TraditionalBinaryPrefix {
+ KILO(1024),
+ MEGA(KILO.value << 10),
+ GIGA(MEGA.value << 10),
+ TERA(GIGA.value << 10),
+ PETA(TERA.value << 10),
+ EXA(PETA.value << 10);
+
+ public final long value;
+ public final char symbol;
+
+ TraditionalBinaryPrefix(long value) {
+ this.value = value;
+ this.symbol = toString().charAt(0);
+ }
+
+ /**
+ * @return The TraditionalBinaryPrefix object corresponding to the symbol.
+ */
+ public static TraditionalBinaryPrefix valueOf(char symbol) {
+ symbol = Character.toUpperCase(symbol);
+ for(TraditionalBinaryPrefix prefix : TraditionalBinaryPrefix.values()) {
+ if (symbol == prefix.symbol) {
+ return prefix;
+ }
+ }
+ throw new IllegalArgumentException("Unknown symbol '" + symbol + "'");
+ }
+
+ /**
+ * Convert a string to long.
+     * The input string is first trimmed
+ * and then it is parsed with traditional binary prefix.
+ *
+ * For example,
+ * "-1230k" will be converted to -1230 * 1024 = -1259520;
+ * "891g" will be converted to 891 * 1024^3 = 956703965184;
+ *
+ * @param s input string
+ * @return a long value represented by the input string.
+ */
+ public static long string2long(String s) {
+ s = s.trim();
+ final int lastpos = s.length() - 1;
+ final char lastchar = s.charAt(lastpos);
+ if (Character.isDigit(lastchar)) {
+ return Long.parseLong(s);
+ } else {
+ long prefix;
+ try {
+ prefix = TraditionalBinaryPrefix.valueOf(lastchar).value;
+ } catch (IllegalArgumentException e) {
+ throw new IllegalArgumentException("Invalid size prefix '" + lastchar
+ + "' in '" + s
+ + "'. Allowed prefixes are k, m, g, t, p, e(case insensitive)");
+ }
+ long num = Long.parseLong(s.substring(0, lastpos));
+ if (num > (Long.MAX_VALUE/prefix) || num < (Long.MIN_VALUE/prefix)) {
+ throw new IllegalArgumentException(s + " does not fit in a Long");
+ }
+ return num * prefix;
+ }
+ }
+ }
+
+ /**
+ * Escapes HTML Special characters present in the string.
+ * @param string
+ * @return HTML Escaped String representation
+ */
+ public static String escapeHTML(String string) {
+ if(string == null) {
+ return null;
+ }
+ StringBuilder sb = new StringBuilder();
+ boolean lastCharacterWasSpace = false;
+ char[] chars = string.toCharArray();
+ for(char c : chars) {
+ if(c == ' ') {
+ if(lastCharacterWasSpace){
+ lastCharacterWasSpace = false;
+ sb.append(" ");
+ }else {
+ lastCharacterWasSpace=true;
+ sb.append(" ");
+ }
+ }else {
+ lastCharacterWasSpace = false;
+ switch(c) {
+        case '<': sb.append("&lt;"); break;
+        case '>': sb.append("&gt;"); break;
+        case '&': sb.append("&amp;"); break;
+        case '"': sb.append("&quot;"); break;
+ default : sb.append(c);break;
+ }
+ }
+ }
+
+ return sb.toString();
+ }
+
+ /**
+ * Return an abbreviated English-language desc of the byte length
+ */
+ public static String byteDesc(long len) {
+ double val = 0.0;
+ String ending = "";
+ if (len < 1024 * 1024) {
+ val = (1.0 * len) / 1024;
+ ending = " KB";
+ } else if (len < 1024 * 1024 * 1024) {
+ val = (1.0 * len) / (1024 * 1024);
+ ending = " MB";
+ } else if (len < 1024L * 1024 * 1024 * 1024) {
+ val = (1.0 * len) / (1024 * 1024 * 1024);
+ ending = " GB";
+ } else if (len < 1024L * 1024 * 1024 * 1024 * 1024) {
+ val = (1.0 * len) / (1024L * 1024 * 1024 * 1024);
+ ending = " TB";
+ } else {
+ val = (1.0 * len) / (1024L * 1024 * 1024 * 1024 * 1024);
+ ending = " PB";
+ }
+ return limitDecimalTo2(val) + ending;
+ }
+
+ public static synchronized String limitDecimalTo2(double d) {
+ return decimalFormat.format(d);
+ }
+
+ /**
+ * Concatenates strings, using a separator.
+ *
+ * @param separator Separator to join with.
+ * @param strings Strings to join.
+ */
+  public static String join(CharSequence separator, Iterable<?> strings) {
+    Iterator<?> i = strings.iterator();
+ if (!i.hasNext()) {
+ return "";
+ }
+ StringBuilder sb = new StringBuilder(i.next().toString());
+ while (i.hasNext()) {
+ sb.append(separator);
+ sb.append(i.next().toString());
+ }
+ return sb.toString();
+ }
+
+ /**
+ * Convert SOME_STUFF to SomeStuff
+ *
+ * @param s input string
+ * @return camelized string
+ */
+ public static String camelize(String s) {
+ StringBuilder sb = new StringBuilder();
+ String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');
+
+ for (String word : words) {
+ sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
+ }
+
+ return sb.toString();
+ }
+
+}
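[Note: a quick usage sketch of the escape-aware helpers above; expected values are shown in the comments:]

    String escaped = HiveStringUtils.escapeString("b,c");       // -> b\,c
    String[] parts = HiveStringUtils.split("a," + escaped + ",d");
    // -> { "a", "b\,c", "d" }: split() leaves the escape char in the token
    String plain = HiveStringUtils.unEscapeString(parts[1]);    // -> b,c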
diff --git common/src/java/org/apache/hive/common/util/HiveVersionInfo.java common/src/java/org/apache/hive/common/util/HiveVersionInfo.java
new file mode 100644
index 0000000..bd59b81
--- /dev/null
+++ common/src/java/org/apache/hive/common/util/HiveVersionInfo.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+import org.apache.hive.common.HiveVersionAnnotation;
+
+/**
+ * HiveVersionInfo.
+ *
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class HiveVersionInfo {
+ private static final Log LOG = LogFactory.getLog(HiveVersionInfo.class);
+
+ private static Package myPackage;
+ private static HiveVersionAnnotation version;
+
+ static {
+ myPackage = HiveVersionAnnotation.class.getPackage();
+ version = myPackage.getAnnotation(HiveVersionAnnotation.class);
+ }
+
+ /**
+ * Get the meta-data for the Hive package.
+ * @return
+ */
+ static Package getPackage() {
+ return myPackage;
+ }
+
+ /**
+ * Get the Hive version.
+ * @return the Hive version string, eg. "0.6.3-dev"
+ */
+ public static String getVersion() {
+ return version != null ? version.version() : "Unknown";
+ }
+
+ /**
+ * Get the subversion revision number for the root directory
+ * @return the revision number, eg. "451451"
+ */
+ public static String getRevision() {
+ return version != null ? version.revision() : "Unknown";
+ }
+
+ /**
+ * Get the branch on which this originated.
+ * @return The branch name, e.g. "trunk" or "branches/branch-0.20"
+ */
+ public static String getBranch() {
+ return version != null ? version.branch() : "Unknown";
+ }
+
+ /**
+ * The date that Hive was compiled.
+ * @return the compilation date in unix date format
+ */
+ public static String getDate() {
+ return version != null ? version.date() : "Unknown";
+ }
+
+ /**
+ * The user that compiled Hive.
+ * @return the username of the user
+ */
+ public static String getUser() {
+ return version != null ? version.user() : "Unknown";
+ }
+
+ /**
+ * Get the subversion URL for the root Hive directory.
+ */
+ public static String getUrl() {
+ return version != null ? version.url() : "Unknown";
+ }
+
+ /**
+ * Get the checksum of the source files from which Hive was
+ * built.
+ **/
+ public static String getSrcChecksum() {
+ return version != null ? version.srcChecksum() : "Unknown";
+ }
+
+ /**
+ * Returns the buildVersion which includes version,
+ * revision, user and date.
+ */
+ public static String getBuildVersion(){
+ return HiveVersionInfo.getVersion() +
+ " from " + HiveVersionInfo.getRevision() +
+ " by " + HiveVersionInfo.getUser() +
+ " source checksum " + HiveVersionInfo.getSrcChecksum();
+ }
+
+ public static void main(String[] args) {
+ LOG.debug("version: "+ version);
+ System.out.println("Hive " + getVersion());
+ System.out.println("Subversion " + getUrl() + " -r " + getRevision());
+ System.out.println("Compiled by " + getUser() + " on " + getDate());
+ System.out.println("From source with checksum " + getSrcChecksum());
+ }
+
+}
diff --git common/src/java/org/apache/hive/common/util/ShutdownHookManager.java common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
new file mode 100644
index 0000000..fd2f20a
--- /dev/null
+++ common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
@@ -0,0 +1,205 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * The ShutdownHookManager enables running shutdownHook
+ * in a deterministic order, higher priority first.
+ *
+ * The JVM runs ShutdownHooks in a non-deterministic order or in parallel.
+ * This class registers a single JVM shutdownHook and run all the
+ * shutdownHooks registered to it (to this class) in order based on their
+ * priority.
+ *
+ * Originally taken from o.a.hadoop.util.ShutdownHookManager
+ */
+public class ShutdownHookManager {
+
+ private static final ShutdownHookManager MGR = new ShutdownHookManager();
+
+ private static final Log LOG = LogFactory.getLog(ShutdownHookManager.class);
+
+ static {
+ Runtime.getRuntime().addShutdownHook(
+ new Thread() {
+ @Override
+ public void run() {
+ MGR.shutdownInProgress.set(true);
+ for (Runnable hook: MGR.getShutdownHooksInOrder()) {
+ try {
+ hook.run();
+ } catch (Throwable ex) {
+ LOG.warn("ShutdownHook '" + hook.getClass().getSimpleName() +
+ "' failed, " + ex.toString(), ex);
+ }
+ }
+ }
+ }
+ );
+ }
+
+
+ /**
+ * Private structure to store ShutdownHook and its priority.
+ */
+ private static class HookEntry {
+ Runnable hook;
+ int priority;
+
+ public HookEntry(Runnable hook, int priority) {
+ this.hook = hook;
+ this.priority = priority;
+ }
+
+ @Override
+ public int hashCode() {
+ return hook.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ boolean eq = false;
+ if (obj != null) {
+ if (obj instanceof HookEntry) {
+ eq = (hook == ((HookEntry)obj).hook);
+ }
+ }
+ return eq;
+ }
+
+ }
+
+  private final Set<HookEntry> hooks =
+    Collections.synchronizedSet(new HashSet<HookEntry>());
+
+ private final AtomicBoolean shutdownInProgress = new AtomicBoolean(false);
+
+ //private to constructor to ensure singularity
+ private ShutdownHookManager() {
+ }
+
+ /**
+ * Returns the list of shutdownHooks in order of execution,
+ * Highest priority first.
+ *
+ * @return the list of shutdownHooks in order of execution.
+ */
+  static List<Runnable> getShutdownHooksInOrder() {
+ return MGR.getShutdownHooksInOrderInternal();
+ }
+
+  List<Runnable> getShutdownHooksInOrderInternal() {
+    List<HookEntry> list;
+    synchronized (MGR.hooks) {
+      list = new ArrayList<HookEntry>(MGR.hooks);
+    }
+    Collections.sort(list, new Comparator<HookEntry>() {
+
+ //reversing comparison so highest priority hooks are first
+ @Override
+ public int compare(HookEntry o1, HookEntry o2) {
+ return o2.priority - o1.priority;
+ }
+ });
+    List<Runnable> ordered = new ArrayList<Runnable>();
+ for (HookEntry entry: list) {
+ ordered.add(entry.hook);
+ }
+ return ordered;
+ }
+
+
+ /**
+ * Adds a shutdownHook with a priority, the higher the priority
+   * the earlier it will run. ShutdownHooks with the same priority run
+ * in a non-deterministic order.
+ *
+ * @param shutdownHook shutdownHook Runnable
+ * @param priority priority of the shutdownHook.
+ */
+ public static void addShutdownHook(Runnable shutdownHook, int priority) {
+ MGR.addShutdownHookInternal(shutdownHook, priority);
+ }
+
+ private void addShutdownHookInternal(Runnable shutdownHook, int priority) {
+ if (shutdownHook == null) {
+ throw new IllegalArgumentException("shutdownHook cannot be NULL");
+ }
+ if (shutdownInProgress.get()) {
+ throw new IllegalStateException("Shutdown in progress, cannot add a shutdownHook");
+ }
+ hooks.add(new HookEntry(shutdownHook, priority));
+ }
+
+ /**
+ * Removes a shutdownHook.
+ *
+ * @param shutdownHook shutdownHook to remove.
+ * @return TRUE if the shutdownHook was registered and removed,
+ * FALSE otherwise.
+ */
+ public static boolean removeShutdownHook(Runnable shutdownHook) {
+ return MGR.removeShutdownHookInternal(shutdownHook);
+ }
+
+ private boolean removeShutdownHookInternal(Runnable shutdownHook) {
+ if (shutdownInProgress.get()) {
+ throw new IllegalStateException("Shutdown in progress, cannot remove a shutdownHook");
+ }
+ return hooks.remove(new HookEntry(shutdownHook, 0));
+ }
+
+ /**
+ * Indicates if a shutdownHook is registered or not.
+ *
+ * @param shutdownHook shutdownHook to check if registered.
+   * @return TRUE/FALSE depending on whether the shutdownHook is registered.
+ */
+ public static boolean hasShutdownHook(Runnable shutdownHook) {
+ return MGR.hasShutdownHookInternal(shutdownHook);
+ }
+
+ public boolean hasShutdownHookInternal(Runnable shutdownHook) {
+ return hooks.contains(new HookEntry(shutdownHook, 0));
+ }
+
+ /**
+ * Indicates if shutdown is in progress or not.
+ *
+ * @return TRUE if the shutdown is in progress, otherwise FALSE.
+ */
+ public static boolean isShutdownInProgress() {
+ return MGR.isShutdownInProgressInternal();
+ }
+
+ private boolean isShutdownInProgressInternal() {
+ return shutdownInProgress.get();
+ }
+}
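[Note: a small sketch of the ordering contract above -- higher priority runs earlier at JVM shutdown:]

    ShutdownHookManager.addShutdownHook(new Runnable() {
      @Override
      public void run() { /* runs first: e.g. flush state */ }
    }, 10);
    ShutdownHookManager.addShutdownHook(new Runnable() {
      @Override
      public void run() { /* runs second: e.g. close logs */ }
    }, 0);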
diff --git common/src/scripts/saveVersion.sh common/src/scripts/saveVersion.sh
new file mode 100644
index 0000000..a917d4a
--- /dev/null
+++ common/src/scripts/saveVersion.sh
@@ -0,0 +1,74 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This file is used to generate the package-info.java class that
+# records the version, revision, branch, user, timestamp, and url
+unset LANG
+unset LC_CTYPE
+unset LC_TIME
+version=$1
+src_dir=$2
+revision=$3
+branch=$4
+url=$5
+user=`whoami`
+date=`date`
+dir=`pwd`
+cwd=`dirname $dir`
+if [ "$revision" = "" ]; then
+ if git rev-parse HEAD 2>/dev/null > /dev/null ; then
+ revision=`git log -1 --pretty=format:"%H" ../`
+ hostname=`hostname`
+ branch=`git branch | sed -n -e 's/^* //p'`
+ url="git://${hostname}${cwd}"
+ elif [ -d .svn ]; then
+ revision=`svn info ../ | sed -n -e 's/Last Changed Rev: \(.*\)/\1/p'`
+ url=`svn info ../ | sed -n -e 's/^URL: \(.*\)/\1/p'`
+ # Get canonical branch (branches/X, tags/X, or trunk)
+ branch=`echo $url | sed -n -e 's,.*\(branches/.*\)$,\1,p' \
+ -e 's,.*\(tags/.*\)$,\1,p' \
+ -e 's,.*trunk$,trunk,p'`
+ else
+ revision="Unknown"
+ branch="Unknown"
+ url="file://$cwd"
+ fi
+fi
+if [ "$branch" = "" ]; then
+ branch="Unknown"
+fi
+if [ "$url" = "" ]; then
+ url="file://$cwd"
+fi
+
+srcChecksum=`find ../ -name '*.java' | grep -v generated-sources | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
+
+mkdir -p $src_dir/gen/org/apache/hive/common
+cat << EOF | \
+ sed -e "s/VERSION/$version/" -e "s/USER/$user/" -e "s/DATE/$date/" \
+ -e "s|URL|$url|" -e "s/REV/$revision/" \
+ -e "s|BRANCH|$branch|" -e "s/SRCCHECKSUM/$srcChecksum/" \
+ > $src_dir/gen/org/apache/hive/common/package-info.java
+/*
+ * Generated by saveVersion.sh
+ */
+@HiveVersionAnnotation(version="VERSION", revision="REV", branch="BRANCH",
+ user="USER", date="DATE", url="URL",
+ srcChecksum="SRCCHECKSUM")
+package org.apache.hive.common;
+EOF
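[Note: after the sed substitution above, the generated src/gen/org/apache/hive/common/package-info.java would look like this; the values are samples for illustration, not real build output:]

    /*
     * Generated by saveVersion.sh
     */
    @HiveVersionAnnotation(version="0.10.0-SNAPSHOT", revision="a1b2c3d", branch="trunk",
                           user="dev", date="Mon Jan 7 12:00:00 UTC 2013",
                           url="git://devhost/home/dev/hive",
                           srcChecksum="d41d8cd98f00b204e9800998ecf8427e")
    package org.apache.hive.common;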
diff --git common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
new file mode 100644
index 0000000..fa30f15
--- /dev/null
+++ common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * TestShutdownHookManager.
+ *
+ * Originally taken from o.a.hadoop.util.TestShutdownHookManager
+ */
+public class TestShutdownHookManager {
+
+ @Test
+ public void shutdownHookManager() {
+ Assert.assertEquals(0, ShutdownHookManager.getShutdownHooksInOrder().size());
+ Runnable hook1 = new Runnable() {
+ @Override
+ public void run() {
+ }
+ };
+ Runnable hook2 = new Runnable() {
+ @Override
+ public void run() {
+ }
+ };
+
+ ShutdownHookManager.addShutdownHook(hook1, 0);
+ Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
+ Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size());
+ Assert.assertEquals(hook1, ShutdownHookManager.getShutdownHooksInOrder().get(0));
+ ShutdownHookManager.removeShutdownHook(hook1);
+ Assert.assertFalse(ShutdownHookManager.hasShutdownHook(hook1));
+
+ ShutdownHookManager.addShutdownHook(hook1, 0);
+ Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
+ Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size());
+ Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
+ Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size());
+
+ ShutdownHookManager.addShutdownHook(hook2, 1);
+ Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
+ Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook2));
+ Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size());
+ Assert.assertEquals(hook2, ShutdownHookManager.getShutdownHooksInOrder().get(0));
+ Assert.assertEquals(hook1, ShutdownHookManager.getShutdownHooksInOrder().get(1));
+
+ }
+}
diff --git conf/hive-default.xml.template conf/hive-default.xml.template
index abbab99..2aab282 100644
--- conf/hive-default.xml.template
+++ conf/hive-default.xml.template
@@ -1120,7 +1120,7 @@
    <name>hive.profiler.retries.wait</name>
    <value>3000</value>
-    <description>The base waiting window (in milliseconds) before the next retry. The actual wait time is calculated by baseWindow * failues + baseWindow * (failure + 1) * (random number between [0.0,1.0]).</description>
+    <description>The base waiting window (in milliseconds) before the next retry. The actual wait time is calculated by baseWindow * failures + baseWindow * (failure + 1) * (random number between [0.0,1.0]).</description>
@@ -1174,7 +1174,7 @@
    <name>hive.stats.retries.wait</name>
    <value>3000</value>
-    <description>The base waiting window (in milliseconds) before the next retry. The actual wait time is calculated by baseWindow * failues + baseWindow * (failure + 1) * (random number between [0.0,1.0]).</description>
+    <description>The base waiting window (in milliseconds) before the next retry. The actual wait time is calculated by baseWindow * failures + baseWindow * (failure + 1) * (random number between [0.0,1.0]).</description>
@@ -1676,7 +1676,7 @@
   Currently the query should be single sourced not having any subquery and should not have
   any aggregations or distincts (which incurrs RS), lateral views and joins.
   1. minimal : SELECT STAR, FILTER on partition columns, LIMIT only
-  2. more : SELECT, FILTER, LIMIT only (+TABLESAMPLE, virtual columns)
+  2. more : SELECT, FILTER, LIMIT only (&#43;TABLESAMPLE, virtual columns)
@@ -1692,7 +1692,6 @@
   <description>The number of miliseconds between HMSHandler retry attempts</description>
 </property>
-
 <property>
   <name>hive.server.read.socket.timeout</name>
   <value>10</value>
@@ -1765,5 +1764,92 @@
   <value>false</value>
   <description>Whether to enable using Column Position Alias in Group By or Order By</description>
 </property>
+
+<property>
+  <name>hive.server2.thrift.min.worker.threads</name>
+  <value>5</value>
+  <description>Minimum number of Thrift worker threads</description>
+</property>
+
+<property>
+  <name>hive.server2.thrift.max.worker.threads</name>
+  <value>100</value>
+  <description>Maximum number of Thrift worker threads</description>
+</property>
+
+<property>
+  <name>hive.server2.thrift.port</name>
+  <value>10000</value>
+  <description>Port number of HiveServer2 Thrift interface.
+  Can be overridden by setting $HIVE_SERVER2_THRIFT_PORT</description>
+</property>
+
+<property>
+  <name>hive.server2.thrift.bind.host</name>
+  <value>localhost</value>
+  <description>Bind host on which to run the HiveServer2 Thrift interface.
+  Can be overridden by setting $HIVE_SERVER2_THRIFT_BIND_HOST</description>
+</property>
+
+<property>
+  <name>hive.server2.authentication</name>
+  <value>NONE</value>
+  <description>
+    Client authentication types.
+      NONE: no authentication check
+      LDAP: LDAP/AD based authentication
+      KERBEROS: Kerberos/GSSAPI authentication
+      CUSTOM: Custom authentication provider
+              (Use with property hive.server2.custom.authentication.class)
+  </description>
+</property>
+
+<property>
+  <name>hive.server2.custom.authentication.class</name>
+  <value></value>
+  <description>
+    Custom authentication class. Used when property
+    'hive.server2.authentication' is set to 'CUSTOM'. Provided class
+    must be a proper implementation of the interface
+    org.apache.hive.service.auth.PasswdAuthenticationProvider. HiveServer2
+    will call its Authenticate(user, passed) method to authenticate requests.
+    The implementation may optionally extend the Hadoop's
+    org.apache.hadoop.conf.Configured class to grab Hive's Configuration object.
+  </description>
+</property>
+
+<property>
+  <name>hive.server2.authentication.kerberos.principal</name>
+  <value></value>
+  <description>
+    Kerberos server principal
+  </description>
+</property>
+
+<property>
+  <name>hive.server2.authentication.kerberos.keytab</name>
+  <value></value>
+  <description>
+    Kerberos keytab file for server principal
+  </description>
+</property>
+
+<property>
+  <name>hive.server2.authentication.ldap.url</name>
+  <value></value>
+  <description>
+    LDAP connection URL
+  </description>
+</property>
+
+<property>
+  <name>hive.server2.authentication.ldap.baseDN</name>
+  <value></value>
+  <description>
+    LDAP base DN
+  </description>
+</property>
+
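Of the new entries above, the CUSTOM authentication mode is the only one that asks the user to write code. A minimal sketch of such a provider, assuming only what the description states (a single Authenticate(user, password) method on the interface); the class name, package, and rejection rule are invented for illustration:

    // Hypothetical provider wired in via hive.server2.authentication=CUSTOM and
    // hive.server2.custom.authentication.class=com.example.StaticAuthProvider
    package com.example;

    import javax.security.sasl.AuthenticationException;
    import org.apache.hive.service.auth.PasswdAuthenticationProvider;

    public class StaticAuthProvider implements PasswdAuthenticationProvider {
      @Override
      public void Authenticate(String user, String password)
          throws AuthenticationException {
        // Reject everything except one hard-coded test account.
        if (!"test".equals(user) || !"testpw".equals(password)) {
          throw new AuthenticationException("Invalid user/password");
        }
      }
    }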
diff --git contrib/build.xml contrib/build.xml
index 277c985..5d33d83 100644
--- contrib/build.xml
+++ contrib/build.xml
@@ -49,7 +49,8 @@
-
-->
-
-
+
diff --git data/files/types/primitives/090101.txt data/files/types/primitives/090101.txt
new file mode 100644
index 0000000..35041d5
--- /dev/null
+++ data/files/types/primitives/090101.txt
@@ -0,0 +1,25 @@
+0,true,0,0,0,0,0.0,0.0,01/01/09,0,2009-01-01 00:00:00.0
+1,\N,1,1,1,10,1.1,10.1,01/01/09,1,2009-01-01 00:01:00.0
+2,true,\N,2,2,20,2.2,20.2,01/01/09,2,2009-01-01 00:02:00.10
+3,false,3,\N,3,30,3.3,30.299999999999997,01/01/09,3,2009-01-01 00:03:00.30
+4,true,4,4,\N,40,4.4,40.4,01/01/09,4,2009-01-01 00:04:00.60
+5,false,5,5,5,\N,5.5,50.5,01/01/09,5,2009-01-01 00:05:00.100
+6,true,6,6,6,60,\N,60.599999999999994,01/01/09,6,2009-01-01 00:06:00.150
+7,false,7,7,7,70,7.7,\N,01/01/09,7,2009-01-01 00:07:00.210
+8,true,8,8,8,80,8.8,80.8,\N,8,2009-01-01 00:08:00.280
+9,false,9,9,9,90,9.9,90.89999999999999,01/01/09,\N,2009-01-01 00:09:00.360
+10,true,0,0,0,0,0.0,0.0,01/02/09,0,\N
+11,\N,1,1,1,10,1.1,10.1,01/02/09,1,2009-01-02 00:11:00.450
+12,true,\N,2,2,20,2.2,20.2,01/02/09,2,2009-01-02 00:12:00.460
+13,false,3,\N,3,30,3.3,30.299999999999997,01/02/09,3,2009-01-02 00:13:00.480
+14,true,4,4,\N,40,4.4,40.4,01/02/09,4,2009-01-02 00:14:00.510
+15,false,5,5,5,\N,5.5,50.5,01/02/09,5,2009-01-02 00:15:00.550
+16,true,6,6,6,60,\N,60.599999999999994,01/02/09,6,2009-01-02 00:16:00.600
+17,false,7,7,7,70,7.7,\N,01/02/09,7,2009-01-02 00:17:00.660
+18,true,8,8,8,80,8.8,80.8,\N,8,2009-01-02 00:18:00.730
+19,false,9,9,9,90,9.9,90.89999999999999,01/02/09,\N,2009-01-02 00:19:00.810
+20,true,0,0,0,0,0.0,0.0,01/03/09,0,\N
+21,\N,1,1,1,10,1.1,10.1,01/03/09,1,2009-01-03 00:21:00.900
+22,true,\N,2,2,20,2.2,20.2,01/03/09,2,2009-01-03 00:22:00.910
+23,false,3,\N,3,30,3.3,30.299999999999997,01/03/09,3,2009-01-03 00:23:00.930
+24,true,4,4,\N,40,4.4,40.4,01/03/09,4,2009-01-03 00:24:00.960
diff --git data/files/types/primitives/090201.txt data/files/types/primitives/090201.txt
new file mode 100644
index 0000000..f2a21f7
--- /dev/null
+++ data/files/types/primitives/090201.txt
@@ -0,0 +1,25 @@
+25,false,0,0,0,\N,0.0,0.0,02/01/09,0,2009-02-01 00:00:00.0
+26,true,1,1,1,10,\N,10.1,02/01/09,1,2009-02-01 00:01:00.0
+27,false,2,2,2,20,2.2,\N,02/01/09,2,2009-02-01 00:02:00.10
+28,true,3,3,3,30,3.3,\N,02/01/09,3,2009-02-01 00:03:00.30
+29,false,4,4,4,40,4.4,40.4,\N,4,2009-02-01 00:04:00.60
+30,true,5,5,5,50,5.5,50.5,\N,5,2009-02-01 00:05:00.100
+31,false,6,6,6,60,6.6,60.599999999999994,02/01/09,\N,2009-02-01 00:06:00.150
+32,true,7,7,7,70,7.7,70.7,02/01/09,7,\N
+33,\N,8,8,8,80,8.8,80.8,02/01/09,8,2009-02-01 00:08:00.280
+34,true,\N,9,9,90,9.9,90.89999999999999,02/01/09,9,2009-02-01 00:09:00.360
+35,false,0,\N,0,0,0.0,0.0,02/02/09,0,2009-02-02 00:10:00.450
+36,true,1,1,\N,10,1.1,10.1,02/02/09,1,2009-02-02 00:11:00.450
+37,false,2,2,2,\N,2.2,20.2,02/02/09,2,2009-02-02 00:12:00.460
+38,true,3,3,3,30,\N,30.299999999999997,02/02/09,3,2009-02-02 00:13:00.480
+39,false,4,4,4,40,4.4,\N,02/02/09,4,2009-02-02 00:14:00.510
+40,true,5,5,5,50,5.5,50.5,\N,5,2009-02-02 00:15:00.550
+41,false,6,6,6,60,6.6,60.599999999999994,02/02/09,\N,2009-02-02 00:16:00.600
+42,true,7,7,7,70,7.7,70.7,02/02/09,7,\N
+43,\N,8,8,8,80,8.8,80.8,02/02/09,8,2009-02-02 00:18:00.730
+44,true,\N,9,9,90,9.9,90.89999999999999,02/02/09,9,2009-02-02 00:19:00.810
+45,false,0,\N,0,0,0.0,0.0,02/03/09,0,2009-02-03 00:20:00.900
+46,true,1,1,\N,10,1.1,10.1,02/03/09,1,2009-02-03 00:21:00.900
+47,false,2,2,2,\N,2.2,20.2,02/03/09,2,2009-02-03 00:22:00.910
+48,true,3,3,3,30,\N,30.299999999999997,02/03/09,3,2009-02-03 00:23:00.930
+49,false,4,4,4,40,4.4,\N,02/03/09,4,2009-02-03 00:24:00.960
diff --git data/files/types/primitives/090301.txt data/files/types/primitives/090301.txt
new file mode 100644
index 0000000..7f475f9
--- /dev/null
+++ data/files/types/primitives/090301.txt
@@ -0,0 +1,25 @@
+50,true,0,0,0,0,0.0,0.0,\N,0,2009-03-01 00:00:00.0
+51,false,1,1,1,10,1.1,10.1,03/01/09,\N,2009-03-01 00:01:00.0
+52,true,2,2,2,20,2.2,20.2,03/01/09,2,\N
+53,\N,3,3,3,30,3.3,30.299999999999997,03/01/09,3,2009-03-01 00:03:00.30
+54,true,\N,4,4,40,4.4,40.4,03/01/09,4,2009-03-01 00:04:00.60
+55,false,5,\N,5,50,5.5,50.5,03/01/09,5,2009-03-01 00:05:00.100
+56,true,6,6,\N,60,6.6,60.599999999999994,03/01/09,6,2009-03-01 00:06:00.150
+57,false,7,7,7,\N,7.7,70.7,03/01/09,7,2009-03-01 00:07:00.210
+58,true,8,8,8,80,\N,80.8,03/01/09,8,2009-03-01 00:08:00.280
+59,false,9,9,9,90,9.9,\N,03/01/09,9,2009-03-01 00:09:00.360
+60,true,0,0,0,0,0.0,0.0,\N,0,2009-03-02 00:10:00.450
+61,false,1,1,1,10,1.1,10.1,03/02/09,\N,2009-03-02 00:11:00.450
+62,true,2,2,2,20,2.2,20.2,03/02/09,2,\N
+63,\N,3,3,3,30,3.3,30.299999999999997,03/02/09,3,2009-03-02 00:13:00.480
+64,true,\N,4,4,40,4.4,40.4,03/02/09,4,2009-03-02 00:14:00.510
+65,false,5,\N,5,50,5.5,50.5,03/02/09,5,2009-03-02 00:15:00.550
+66,true,6,6,\N,60,6.6,60.599999999999994,03/02/09,6,2009-03-02 00:16:00.600
+67,false,7,7,7,\N,7.7,70.7,03/02/09,7,2009-03-02 00:17:00.660
+68,true,8,8,8,80,\N,80.8,03/02/09,8,2009-03-02 00:18:00.730
+69,false,9,9,9,90,9.9,\N,03/02/09,9,2009-03-02 00:19:00.810
+70,true,0,0,0,0,0.0,0.0,\N,0,2009-03-03 00:20:00.900
+71,false,1,1,1,10,1.1,10.1,03/03/09,\N,2009-03-03 00:21:00.900
+72,true,2,2,2,20,2.2,20.2,03/03/09,2,\N
+73,\N,3,3,3,30,3.3,30.299999999999997,03/03/09,3,2009-03-03 00:23:00.930
+74,true,\N,4,4,40,4.4,40.4,03/03/09,4,2009-03-03 00:24:00.960
diff --git data/files/types/primitives/090401.txt data/files/types/primitives/090401.txt
new file mode 100644
index 0000000..a3a0d69
--- /dev/null
+++ data/files/types/primitives/090401.txt
@@ -0,0 +1,25 @@
+75,false,0,\N,0,0,0.0,0.0,04/01/09,0,2009-04-01 00:00:00.0
+76,true,1,1,\N,10,1.1,10.1,04/01/09,1,2009-04-01 00:01:00.0
+77,false,2,2,2,\N,2.2,20.2,04/01/09,2,2009-04-01 00:02:00.10
+78,true,3,3,3,30,\N,30.299999999999997,04/01/09,3,2009-04-01 00:03:00.30
+79,false,4,4,4,40,4.4,\N,04/01/09,4,2009-04-01 00:04:00.60
+80,true,5,5,5,50,5.5,50.5,\N,5,2009-04-01 00:05:00.100
+81,false,6,6,6,60,6.6,60.599999999999994,04/01/09,\N,2009-04-01 00:06:00.150
+82,true,7,7,7,70,7.7,70.7,04/01/09,7,\N
+83,\N,8,8,8,80,8.8,80.8,04/01/09,8,2009-04-01 00:08:00.280
+84,true,\N,9,9,90,9.9,90.89999999999999,04/01/09,9,2009-04-01 00:09:00.360
+85,false,0,\N,0,0,0.0,0.0,04/02/09,0,2009-04-02 00:10:00.450
+86,true,1,1,\N,10,1.1,10.1,04/02/09,1,2009-04-02 00:11:00.450
+87,false,2,2,2,\N,2.2,20.2,04/02/09,2,2009-04-02 00:12:00.460
+88,true,3,3,3,30,\N,30.299999999999997,04/02/09,3,2009-04-02 00:13:00.480
+89,false,4,4,4,40,4.4,\N,04/02/09,4,2009-04-02 00:14:00.510
+90,true,5,5,5,50,5.5,50.5,\N,5,2009-04-02 00:15:00.550
+91,false,6,6,6,60,6.6,60.599999999999994,04/02/09,\N,2009-04-02 00:16:00.600
+92,true,7,7,7,70,7.7,70.7,04/02/09,7,\N
+93,\N,8,8,8,80,8.8,80.8,04/02/09,8,2009-04-02 00:18:00.730
+94,true,\N,9,9,90,9.9,90.89999999999999,04/02/09,9,2009-04-02 00:19:00.810
+95,false,0,\N,0,0,0.0,0.0,04/03/09,0,2009-04-03 00:20:00.900
+96,true,1,1,\N,10,1.1,10.1,04/03/09,1,2009-04-03 00:21:00.900
+97,false,2,2,2,\N,2.2,20.2,04/03/09,2,2009-04-03 00:22:00.910
+98,true,3,3,3,30,\N,30.299999999999997,04/03/09,3,2009-04-03 00:23:00.930
+99,false,4,4,4,40,4.4,\N,04/03/09,4,2009-04-03 00:24:00.960
diff --git data/scripts/q_test_cleanup.sql data/scripts/q_test_cleanup.sql
new file mode 100644
index 0000000..31bd720
--- /dev/null
+++ data/scripts/q_test_cleanup.sql
@@ -0,0 +1,10 @@
+DROP TABLE IF EXISTS src;
+DROP TABLE IF EXISTS src1;
+DROP TABLE IF EXISTS src_json;
+DROP TABLE IF EXISTS src_sequencefile;
+DROP TABLE IF EXISTS src_thrift;
+DROP TABLE IF EXISTS srcbucket;
+DROP TABLE IF EXISTS srcbucket2;
+DROP TABLE IF EXISTS srcpart;
+DROP TABLE IF EXISTS primitives;
+
diff --git data/scripts/q_test_init.sql data/scripts/q_test_init.sql
new file mode 100644
index 0000000..12afdf3
--- /dev/null
+++ data/scripts/q_test_init.sql
@@ -0,0 +1,132 @@
+--
+-- Table src
+--
+DROP TABLE IF EXISTS src;
+
+CREATE TABLE src (key STRING, value STRING) STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src;
+
+--
+-- Table src1
+--
+DROP TABLE IF EXISTS src1;
+
+CREATE TABLE src1 (key STRING, value STRING) STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv3.txt" INTO TABLE src1;
+
+--
+-- Table src_json
+--
+DROP TABLE IF EXISTS src_json;
+
+CREATE TABLE src_json (json STRING) STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/json.txt" INTO TABLE src_json;
+
+
+--
+-- Table src_sequencefile
+--
+DROP TABLE IF EXISTS src_sequencefile;
+
+CREATE TABLE src_sequencefile (key STRING, value STRING) STORED AS SEQUENCEFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.seq" INTO TABLE src_sequencefile;
+
+
+--
+-- Table src_thrift
+--
+DROP TABLE IF EXISTS src_thrift;
+
+CREATE TABLE src_thrift
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
+WITH SERDEPROPERTIES (
+ 'serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
+ 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol')
+STORED AS SEQUENCEFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/complex.seq" INTO TABLE src_thrift;
+
+
+--
+-- Table srcbucket
+--
+DROP TABLE IF EXISTS srcbucket;
+
+CREATE TABLE srcbucket (key INT, value STRING)
+CLUSTERED BY (key) INTO 2 BUCKETS
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/srcbucket0.txt" INTO TABLE srcbucket;
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/srcbucket1.txt" INTO TABLE srcbucket;
+
+
+--
+-- Table srcbucket2
+--
+DROP TABLE IF EXISTS srcbucket2;
+
+CREATE TABLE srcbucket2 (key INT, value STRING)
+CLUSTERED BY (key) INTO 4 BUCKETS
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/srcbucket20.txt" INTO TABLE srcbucket2;
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/srcbucket21.txt" INTO TABLE srcbucket2;
+
+
+--
+-- Table srcpart
+--
+DROP TABLE IF EXISTS srcpart;
+
+CREATE TABLE srcpart (key STRING, value STRING)
+PARTITIONED BY (ds STRING, hr STRING)
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
+OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="11");
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
+OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="12");
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
+OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="11");
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
+OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="12");
+
+
+DROP TABLE IF EXISTS primitives;
+CREATE TABLE primitives (
+ id INT,
+ bool_col BOOLEAN,
+ tinyint_col TINYINT,
+ smallint_col SMALLINT,
+ int_col INT,
+ bigint_col BIGINT,
+ float_col FLOAT,
+ double_col DOUBLE,
+ date_string_col STRING,
+ string_col STRING,
+ timestamp_col TIMESTAMP)
+PARTITIONED BY (year INT, month INT)
+ROW FORMAT DELIMITED
+ FIELDS TERMINATED BY ','
+ ESCAPED BY '\\'
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090101.txt"
+OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=1);
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090201.txt"
+OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=2);
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090301.txt"
+OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=3);
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090401.txt"
+OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=4);
+
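The init script above is parameterized on ${hiveconf:test.data.dir}, so the same DDL runs from any checkout as long as the test harness passes the data directory in as a Hive configuration variable. As a sanity check, the partition layout can be queried over the new JDBC driver once a server is up; URL, credentials, and query below are illustrative and assume the default port from the template:

    // Each of the four data files above loads 25 rows into one
    // (year, month) partition of the primitives table.
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    Connection conn = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "hiveuser", "");
    Statement stmt = conn.createStatement();
    ResultSet rs = stmt.executeQuery(
        "SELECT year, month, COUNT(*) FROM primitives GROUP BY year, month");
    while (rs.next()) {
      System.out.println(rs.getInt(1) + "-" + rs.getInt(2) + ": " + rs.getLong(3));
    }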
diff --git eclipse-templates/BeeLine.launchtemplate eclipse-templates/BeeLine.launchtemplate
new file mode 100644
index 0000000..799b1e2
--- /dev/null
+++ eclipse-templates/BeeLine.launchtemplate
@@ -0,0 +1,51 @@
diff --git eclipse-templates/HiveBeeLine.launchtemplate eclipse-templates/HiveBeeLine.launchtemplate
deleted file mode 100644
index 515fc39..0000000
--- eclipse-templates/HiveBeeLine.launchtemplate
+++ /dev/null
@@ -1,50 +0,0 @@
diff --git eclipse-templates/HiveServer2.launchtemplate eclipse-templates/HiveServer2.launchtemplate
new file mode 100644
index 0000000..10f04ab
--- /dev/null
+++ eclipse-templates/HiveServer2.launchtemplate
@@ -0,0 +1,50 @@
diff --git eclipse-templates/TestBeeLineDriver.launchtemplate eclipse-templates/TestBeeLineDriver.launchtemplate
new file mode 100644
index 0000000..36842fd
--- /dev/null
+++ eclipse-templates/TestBeeLineDriver.launchtemplate
@@ -0,0 +1,43 @@
diff --git eclipse-templates/TestEmbeddedThriftCLIService.launchtemplate eclipse-templates/TestEmbeddedThriftCLIService.launchtemplate
new file mode 100644
index 0000000..2e63e0a
--- /dev/null
+++ eclipse-templates/TestEmbeddedThriftCLIService.launchtemplate
@@ -0,0 +1,43 @@
diff --git eclipse-templates/TestHiveServer.launchtemplate eclipse-templates/TestHiveServer.launchtemplate
new file mode 100644
index 0000000..a930aa3
--- /dev/null
+++ eclipse-templates/TestHiveServer.launchtemplate
@@ -0,0 +1,43 @@
diff --git eclipse-templates/TestJdbc2.launchtemplate eclipse-templates/TestJdbc2.launchtemplate
new file mode 100644
index 0000000..cacd7fd
--- /dev/null
+++ eclipse-templates/TestJdbc2.launchtemplate
@@ -0,0 +1,44 @@
diff --git eclipse-templates/TestRemoteThriftCLIService.launchtemplate eclipse-templates/TestRemoteThriftCLIService.launchtemplate
new file mode 100644
index 0000000..d9411a5
--- /dev/null
+++ eclipse-templates/TestRemoteThriftCLIService.launchtemplate
@@ -0,0 +1,43 @@
diff --git hbase-handler/build.xml hbase-handler/build.xml
index 8676ca3..8e23a09 100644
--- hbase-handler/build.xml
+++ hbase-handler/build.xml
@@ -47,7 +47,8 @@
-
-
-
+
diff --git ivy/ivysettings.xml ivy/ivysettings.xml
index 28c481e..d230f2c 100644
--- ivy/ivysettings.xml
+++ ivy/ivysettings.xml
@@ -61,7 +61,7 @@
diff --git ivy/libraries.properties ivy/libraries.properties
index 1e5fbef..f5c9684 100644
--- ivy/libraries.properties
+++ ivy/libraries.properties
@@ -37,6 +37,7 @@ commons-compress.version=1.4.1
commons-configuration.version=1.6
commons-dbcp.version=1.4
commons-httpclient.version=3.0.1
+commons-io.version=2.4
commons-lang.version=2.4
commons-logging.version=1.0.4
commons-logging-api.version=1.0.4
@@ -51,8 +52,6 @@ jdo-api.version=2.3-ec
jdom.version=1.1
jetty.version=6.1.26
jline.version=0.9.94
-sqlline.version=1_0_2
-sqlline.branch=1.0.2
json.version=20090211
junit.version=4.10
libfb303.version=0.9.0
@@ -64,6 +63,7 @@ protobuf.version=2.4.1
rat.version=0.8
slf4j-api.version=1.6.1
slf4j-log4j12.version=1.6.1
+tempus-fugit.version=1.1
snappy.version=0.2
velocity.version=1.5
zookeeper.version=3.4.3
diff --git jdbc/ivy.xml jdbc/ivy.xml
index 29777a3..9269bd1 100644
--- jdbc/ivy.xml
+++ jdbc/ivy.xml
@@ -33,8 +33,6 @@
transitive="false"/>
-
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
new file mode 100644
index 0000000..a65499b
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -0,0 +1,1106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.math.MathContext;
+import java.net.URL;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.sql.Date;
+import java.sql.NClob;
+import java.sql.Ref;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.RowId;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Statement;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Calendar;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.Type;
+import org.apache.hive.service.cli.thrift.TBoolValue;
+import org.apache.hive.service.cli.thrift.TByteValue;
+import org.apache.hive.service.cli.thrift.TColumnValue;
+import org.apache.hive.service.cli.thrift.TDoubleValue;
+import org.apache.hive.service.cli.thrift.TI16Value;
+import org.apache.hive.service.cli.thrift.TI32Value;
+import org.apache.hive.service.cli.thrift.TI64Value;
+import org.apache.hive.service.cli.thrift.TRow;
+import org.apache.hive.service.cli.thrift.TStringValue;
+
+/**
+ * Data independent base class which implements the common part of
+ * all Hive result sets.
+ */
+public abstract class HiveBaseResultSet implements ResultSet {
+ protected SQLWarning warningChain = null;
+ protected boolean wasNull = false;
+ protected TRow row;
+ protected List<String> columnNames;
+ protected List<String> columnTypes;
+
+ private TableSchema schema;
+
+ public boolean absolute(int row) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void afterLast() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void beforeFirst() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void cancelRowUpdates() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void deleteRow() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int findColumn(String columnName) throws SQLException {
+ int columnIndex = columnNames.indexOf(columnName);
+ if (columnIndex == -1) {
+ throw new SQLException("Could not find column " + columnName);
+ } else {
+ // JDBC column indexes are 1-based, while List.indexOf is 0-based
+ return ++columnIndex;
+ }
+ }
+
+ public boolean first() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Array getArray(int i) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Array getArray(String colName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public InputStream getAsciiStream(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public InputStream getAsciiStream(String columnName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
+ Object val = getObject(columnIndex);
+
+ if (val == null || val instanceof BigDecimal) {
+ return (BigDecimal)val;
+ }
+
+ throw new SQLException("Illegal conversion");
+ }
+
+ public BigDecimal getBigDecimal(String columnName) throws SQLException {
+ return getBigDecimal(findColumn(columnName));
+ }
+
+ public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException {
+ // Note: MathContext bounds total precision (significant digits), not scale
+ MathContext mc = new MathContext(scale);
+ return getBigDecimal(columnIndex).round(mc);
+ }
+
+ public BigDecimal getBigDecimal(String columnName, int scale) throws SQLException {
+ return getBigDecimal(findColumn(columnName), scale);
+ }
+
+ public InputStream getBinaryStream(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public InputStream getBinaryStream(String columnName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Blob getBlob(int i) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Blob getBlob(String colName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean getBoolean(int columnIndex) throws SQLException {
+ Object obj = getObject(columnIndex);
+ if (Boolean.class.isInstance(obj)) {
+ return (Boolean) obj;
+ } else if (obj == null) {
+ return false;
+ } else if (Number.class.isInstance(obj)) {
+ return ((Number) obj).intValue() != 0;
+ } else if (String.class.isInstance(obj)) {
+ return !((String) obj).equals("0");
+ }
+ throw new SQLException("Cannot convert column " + columnIndex + " to boolean");
+ }
+
+ public boolean getBoolean(String columnName) throws SQLException {
+ return getBoolean(findColumn(columnName));
+ }
+
+ public byte getByte(int columnIndex) throws SQLException {
+ Object obj = getObject(columnIndex);
+ if (Number.class.isInstance(obj)) {
+ return ((Number) obj).byteValue();
+ } else if (obj == null) {
+ return 0;
+ }
+ throw new SQLException("Cannot convert column " + columnIndex + " to byte");
+ }
+
+ public byte getByte(String columnName) throws SQLException {
+ return getByte(findColumn(columnName));
+ }
+
+ public byte[] getBytes(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public byte[] getBytes(String columnName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Reader getCharacterStream(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Reader getCharacterStream(String columnName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Clob getClob(int i) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Clob getClob(String colName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getConcurrency() throws SQLException {
+ return ResultSet.CONCUR_READ_ONLY;
+ }
+
+ public String getCursorName() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Date getDate(int columnIndex) throws SQLException {
+ Object obj = getObject(columnIndex);
+ if (obj == null) {
+ return null;
+ }
+
+ try {
+ return Date.valueOf((String) obj);
+ } catch (Exception e) {
+ throw new SQLException("Cannot convert column " + columnIndex
+ + " to date: " + e.toString());
+ }
+ }
+
+ public Date getDate(String columnName) throws SQLException {
+ return getDate(findColumn(columnName));
+ }
+
+ public Date getDate(int columnIndex, Calendar cal) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Date getDate(String columnName, Calendar cal) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public double getDouble(int columnIndex) throws SQLException {
+ try {
+ Object obj = getObject(columnIndex);
+ if (Number.class.isInstance(obj)) {
+ return ((Number) obj).doubleValue();
+ } else if (obj == null) {
+ return 0;
+ } else if (String.class.isInstance(obj)) {
+ return Double.valueOf((String)obj);
+ }
+ throw new Exception("Illegal conversion");
+ } catch (Exception e) {
+ throw new SQLException("Cannot convert column " + columnIndex
+ + " to double: " + e.toString());
+ }
+ }
+
+ public double getDouble(String columnName) throws SQLException {
+ return getDouble(findColumn(columnName));
+ }
+
+ public int getFetchDirection() throws SQLException {
+ return ResultSet.FETCH_FORWARD;
+ }
+
+ public int getFetchSize() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public float getFloat(int columnIndex) throws SQLException {
+ try {
+ Object obj = getObject(columnIndex);
+ if (Number.class.isInstance(obj)) {
+ return ((Number) obj).floatValue();
+ } else if (obj == null) {
+ return 0;
+ } else if (String.class.isInstance(obj)) {
+ return Float.valueOf((String)obj);
+ }
+ throw new Exception("Illegal conversion");
+ } catch (Exception e) {
+ throw new SQLException("Cannot convert column " + columnIndex
+ + " to float: " + e.toString());
+ }
+ }
+
+ public float getFloat(String columnName) throws SQLException {
+ return getFloat(findColumn(columnName));
+ }
+
+ public int getHoldability() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getInt(int columnIndex) throws SQLException {
+ try {
+ Object obj = getObject(columnIndex);
+ if (Number.class.isInstance(obj)) {
+ return ((Number) obj).intValue();
+ } else if (obj == null) {
+ return 0;
+ } else if (String.class.isInstance(obj)) {
+ return Integer.valueOf((String)obj);
+ }
+ throw new Exception("Illegal conversion");
+ } catch (Exception e) {
+ throw new SQLException("Cannot convert column " + columnIndex + " to integer" + e.toString());
+ }
+ }
+
+ public int getInt(String columnName) throws SQLException {
+ return getInt(findColumn(columnName));
+ }
+
+ public long getLong(int columnIndex) throws SQLException {
+ try {
+ Object obj = getObject(columnIndex);
+ if (Number.class.isInstance(obj)) {
+ return ((Number) obj).longValue();
+ } else if (obj == null) {
+ return 0;
+ } else if (String.class.isInstance(obj)) {
+ return Long.valueOf((String)obj);
+ }
+ throw new Exception("Illegal conversion");
+ } catch (Exception e) {
+ throw new SQLException("Cannot convert column " + columnIndex + " to long: " + e.toString());
+ }
+ }
+
+ public long getLong(String columnName) throws SQLException {
+ return getLong(findColumn(columnName));
+ }
+
+ public ResultSetMetaData getMetaData() throws SQLException {
+ return new HiveResultSetMetaData(columnNames, columnTypes);
+ }
+
+ public Reader getNCharacterStream(int arg0) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Reader getNCharacterStream(String arg0) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public NClob getNClob(int arg0) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public NClob getNClob(String columnLabel) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getNString(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getNString(String columnLabel) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ private Boolean getBooleanValue(TBoolValue tBoolValue) {
+ if (tBoolValue.isSetValue()) {
+ wasNull = false;
+ return tBoolValue.isValue();
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private Byte getByteValue(TByteValue tByteValue) {
+ if (tByteValue.isSetValue()) {
+ wasNull = false;
+ return tByteValue.getValue();
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private Short getShortValue(TI16Value tI16Value) {
+ if (tI16Value.isSetValue()) {
+ wasNull = false;
+ return tI16Value.getValue();
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private Integer getIntegerValue(TI32Value tI32Value) {
+ if (tI32Value.isSetValue()) {
+ wasNull = false;
+ return tI32Value.getValue();
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private Long getLongValue(TI64Value tI64Value) {
+ if (tI64Value.isSetValue()) {
+ wasNull = false;
+ return tI64Value.getValue();
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private Double getDoubleValue(TDoubleValue tDoubleValue) {
+ if (tDoubleValue.isSetValue()) {
+ wasNull = false;
+ return tDoubleValue.getValue();
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private String getStringValue(TStringValue tStringValue) {
+ if (tStringValue.isSetValue()) {
+ wasNull = false;
+ return tStringValue.getValue();
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private Timestamp getTimestampValue(TStringValue tStringValue) {
+ if (tStringValue.isSetValue()) {
+ wasNull = false;
+ return Timestamp.valueOf(tStringValue.getValue());
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private BigDecimal getBigDecimalValue(TStringValue tStringValue) {
+ if (tStringValue.isSetValue()) {
+ wasNull = false;
+ return new BigDecimal(tStringValue.getValue());
+ }
+ wasNull = true;
+ return null;
+ }
+
+ private Object getColumnValue(int columnIndex) throws SQLException {
+ if (row == null) {
+ throw new SQLException("No row found.");
+ }
+ List<TColumnValue> colVals = row.getColVals();
+ if (colVals == null) {
+ throw new SQLException("RowSet does not contain any columns!");
+ }
+ if (columnIndex > colVals.size()) {
+ throw new SQLException("Invalid columnIndex: " + columnIndex);
+ }
+
+ TColumnValue tColumnValue = colVals.get(columnIndex - 1);
+ Type columnType = getSchema().getColumnDescriptorAt(columnIndex - 1).getType();
+
+ switch (columnType) {
+ case BOOLEAN_TYPE:
+ return getBooleanValue(tColumnValue.getBoolVal());
+ case TINYINT_TYPE:
+ return getByteValue(tColumnValue.getByteVal());
+ case SMALLINT_TYPE:
+ return getShortValue(tColumnValue.getI16Val());
+ case INT_TYPE:
+ return getIntegerValue(tColumnValue.getI32Val());
+ case BIGINT_TYPE:
+ return getLongValue(tColumnValue.getI64Val());
+ case FLOAT_TYPE:
+ return getDoubleValue(tColumnValue.getDoubleVal());
+ case DOUBLE_TYPE:
+ return getDoubleValue(tColumnValue.getDoubleVal());
+ case STRING_TYPE:
+ return getStringValue(tColumnValue.getStringVal());
+ case TIMESTAMP_TYPE:
+ return getTimestampValue(tColumnValue.getStringVal());
+ case DECIMAL_TYPE:
+ return getBigDecimalValue(tColumnValue.getStringVal());
+ default:
+ throw new SQLException("Unrecognized column type:" + columnType);
+ }
+
+ /*
+ switch (tColumnValue.getSetField()) {
+ case BOOL_VAL:
+ return getBooleanValue(tColumnValue.getBoolVal());
+ case BYTE_VAL:
+ return getByteValue(tColumnValue.getByteVal());
+ case I16_VAL:
+ return getShortValue(tColumnValue.getI16Val());
+ case I32_VAL:
+ return getIntegerValue(tColumnValue.getI32Val());
+ case I64_VAL:
+ return getLongValue(tColumnValue.getI64Val());
+ case DOUBLE_VAL:
+ return getDoubleValue(tColumnValue.getDoubleVal());
+ case STRING_VAL:
+ return getStringValue(tColumnValue.getStringVal());
+ default:
+ throw new SQLException("Unrecognized column type:" + tColumnValue.getSetField());
+ }
+ */
+ }
+
+ public Object getObject(int columnIndex) throws SQLException {
+ return getColumnValue(columnIndex);
+ }
+
+ public Object getObject(String columnName) throws SQLException {
+ return getObject(findColumn(columnName));
+ }
+
+ public Object getObject(int i, Map<String, Class<?>> map) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Object getObject(String colName, Map<String, Class<?>> map) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Ref getRef(int i) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Ref getRef(String colName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getRow() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public RowId getRowId(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public RowId getRowId(String columnLabel) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public SQLXML getSQLXML(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public SQLXML getSQLXML(String columnLabel) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public short getShort(int columnIndex) throws SQLException {
+ try {
+ Object obj = getObject(columnIndex);
+ if (Number.class.isInstance(obj)) {
+ return ((Number) obj).shortValue();
+ } else if (obj == null) {
+ return 0;
+ } else if (String.class.isInstance(obj)) {
+ return Short.valueOf((String)obj);
+ }
+ throw new Exception("Illegal conversion");
+ } catch (Exception e) {
+ throw new SQLException("Cannot convert column " + columnIndex
+ + " to short: " + e.toString());
+ }
+ }
+
+ public short getShort(String columnName) throws SQLException {
+ return getShort(findColumn(columnName));
+ }
+
+ public Statement getStatement() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /**
+ * @param columnIndex - the first column is 1, the second is 2, ...
+ * @see java.sql.ResultSet#getString(int)
+ */
+ public String getString(int columnIndex) throws SQLException {
+ Object value = getColumnValue(columnIndex);
+ if (wasNull) {
+ return null;
+ }
+ return value.toString();
+ }
+
+ public String getString(String columnName) throws SQLException {
+ return getString(findColumn(columnName));
+ }
+
+ public Time getTime(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Time getTime(String columnName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Time getTime(int columnIndex, Calendar cal) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Time getTime(String columnName, Calendar cal) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Timestamp getTimestamp(int columnIndex) throws SQLException {
+ Object obj = getObject(columnIndex);
+ if (obj == null) {
+ return null;
+ }
+ if (obj instanceof Timestamp) {
+ return (Timestamp) obj;
+ }
+ if (obj instanceof String) {
+ return Timestamp.valueOf((String)obj);
+ }
+ throw new SQLException("Illegal conversion");
+ }
+
+ public Timestamp getTimestamp(String columnName) throws SQLException {
+ return getTimestamp(findColumn(columnName));
+ }
+
+ public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public Timestamp getTimestamp(String columnName, Calendar cal) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getType() throws SQLException {
+ return ResultSet.TYPE_FORWARD_ONLY;
+ }
+
+ public URL getURL(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public URL getURL(String columnName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public InputStream getUnicodeStream(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public InputStream getUnicodeStream(String columnName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void insertRow() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isAfterLast() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isBeforeFirst() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isClosed() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isFirst() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isLast() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean last() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void moveToCurrentRow() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void moveToInsertRow() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean previous() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void refreshRow() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean relative(int rows) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean rowDeleted() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean rowInserted() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean rowUpdated() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void setFetchDirection(int direction) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void setFetchSize(int rows) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateArray(int columnIndex, Array x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateArray(String columnName, Array x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateAsciiStream(int columnIndex, InputStream x, int length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateAsciiStream(String columnName, InputStream x, int length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateAsciiStream(int columnIndex, InputStream x, long length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateAsciiStream(String columnLabel, InputStream x, long length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBigDecimal(String columnName, BigDecimal x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBinaryStream(int columnIndex, InputStream x, int length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBinaryStream(String columnName, InputStream x, int length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBinaryStream(int columnIndex, InputStream x, long length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBinaryStream(String columnLabel, InputStream x, long length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBlob(int columnIndex, Blob x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBlob(String columnName, Blob x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBlob(int columnIndex, InputStream inputStream, long length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBlob(String columnLabel, InputStream inputStream,
+ long length) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBoolean(int columnIndex, boolean x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBoolean(String columnName, boolean x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateByte(int columnIndex, byte x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateByte(String columnName, byte x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBytes(int columnIndex, byte[] x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateBytes(String columnName, byte[] x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateCharacterStream(int columnIndex, Reader x, int length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateCharacterStream(String columnName, Reader reader, int length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateCharacterStream(int columnIndex, Reader x, long length)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateCharacterStream(String columnLabel, Reader reader,
+ long length) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateClob(int columnIndex, Clob x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateClob(String columnName, Clob x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateClob(int columnIndex, Reader reader) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateClob(String columnLabel, Reader reader) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateDate(int columnIndex, Date x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateDate(String columnName, Date x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateDouble(int columnIndex, double x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateDouble(String columnName, double x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateFloat(int columnIndex, float x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateFloat(String columnName, float x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateInt(int columnIndex, int x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateInt(String columnName, int x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateLong(int columnIndex, long x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateLong(String columnName, long x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNCharacterStream(String columnLabel, Reader reader,
+ long length) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNClob(int columnIndex, NClob clob) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNClob(String columnLabel, NClob clob) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNClob(int columnIndex, Reader reader) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNClob(String columnLabel, Reader reader) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNString(int columnIndex, String string) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNString(String columnLabel, String string) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNull(int columnIndex) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateNull(String columnName) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateObject(int columnIndex, Object x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateObject(String columnName, Object x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateObject(int columnIndex, Object x, int scale) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateObject(String columnName, Object x, int scale) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateRef(int columnIndex, Ref x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateRef(String columnName, Ref x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateRow() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateRowId(int columnIndex, RowId x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateRowId(String columnLabel, RowId x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateShort(int columnIndex, short x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateShort(String columnName, short x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateString(int columnIndex, String x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateString(String columnName, String x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateTime(int columnIndex, Time x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateTime(String columnName, Time x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public void updateTimestamp(String columnName, Timestamp x) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public SQLWarning getWarnings() throws SQLException {
+ return warningChain;
+ }
+
+ public void clearWarnings() throws SQLException {
+ warningChain = null;
+ }
+
+ public void close() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean wasNull() throws SQLException {
+ return wasNull;
+ }
+
+ public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ protected void setSchema(TableSchema schema) {
+ this.schema = schema;
+ }
+
+ protected TableSchema getSchema() {
+ return schema;
+ }
+}
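HiveBaseResultSet centralizes the Thrift-value-to-Java conversions, so every concrete result set inherits the same rules: numeric getters return 0 for SQL NULL, getBoolean() treats any non-zero Number and any String other than "0" as true, and wasNull() reports whether the last value read was NULL. A short caller-side sketch of those rules (reusing stmt from the earlier sketch; columns from the primitives test table):

    ResultSet rs = stmt.executeQuery("SELECT bool_col, int_col FROM primitives");
    while (rs.next()) {
      boolean b = rs.getBoolean(1);    // SQL NULL comes back as false...
      boolean nullBool = rs.wasNull(); // ...so wasNull() disambiguates
      int i = rs.getInt(2);            // SQL NULL comes back as 0
    }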
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java
new file mode 100644
index 0000000..a07efca
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java
@@ -0,0 +1,2442 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.NClob;
+import java.sql.ParameterMetaData;
+import java.sql.Ref;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.RowId;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Calendar;
+import java.util.Map;
+
+/**
+ * HiveCallableStatement.
+ *
+ */
+public class HiveCallableStatement implements java.sql.CallableStatement {
+
+ /**
+ *
+ */
+ public HiveCallableStatement() {
+ // TODO Auto-generated constructor stub
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getArray(int)
+ */
+
+ public Array getArray(int i) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getArray(java.lang.String)
+ */
+
+ public Array getArray(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBigDecimal(int)
+ */
+
+ public BigDecimal getBigDecimal(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBigDecimal(java.lang.String)
+ */
+
+ public BigDecimal getBigDecimal(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBigDecimal(int, int)
+ */
+
+ public BigDecimal getBigDecimal(int parameterIndex, int scale) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBlob(int)
+ */
+
+ public Blob getBlob(int i) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBlob(java.lang.String)
+ */
+
+ public Blob getBlob(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBoolean(int)
+ */
+
+ public boolean getBoolean(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBoolean(java.lang.String)
+ */
+
+ public boolean getBoolean(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getByte(int)
+ */
+
+ public byte getByte(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getByte(java.lang.String)
+ */
+
+ public byte getByte(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBytes(int)
+ */
+
+ public byte[] getBytes(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getBytes(java.lang.String)
+ */
+
+ public byte[] getBytes(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getCharacterStream(int)
+ */
+
+ public Reader getCharacterStream(int arg0) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getCharacterStream(java.lang.String)
+ */
+
+ public Reader getCharacterStream(String arg0) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getClob(int)
+ */
+
+ public Clob getClob(int i) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getClob(java.lang.String)
+ */
+
+ public Clob getClob(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getDate(int)
+ */
+
+ public Date getDate(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getDate(java.lang.String)
+ */
+
+ public Date getDate(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getDate(int, java.util.Calendar)
+ */
+
+ public Date getDate(int parameterIndex, Calendar cal) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getDate(java.lang.String,
+ * java.util.Calendar)
+ */
+
+ public Date getDate(String parameterName, Calendar cal) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getDouble(int)
+ */
+
+ public double getDouble(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getDouble(java.lang.String)
+ */
+
+ public double getDouble(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getFloat(int)
+ */
+
+ public float getFloat(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getFloat(java.lang.String)
+ */
+
+ public float getFloat(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getInt(int)
+ */
+
+ public int getInt(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getInt(java.lang.String)
+ */
+
+ public int getInt(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getLong(int)
+ */
+
+ public long getLong(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getLong(java.lang.String)
+ */
+
+ public long getLong(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getNCharacterStream(int)
+ */
+
+ public Reader getNCharacterStream(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getNCharacterStream(java.lang.String)
+ */
+
+ public Reader getNCharacterStream(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getNClob(int)
+ */
+
+ public NClob getNClob(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getNClob(java.lang.String)
+ */
+
+ public NClob getNClob(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getNString(int)
+ */
+
+ public String getNString(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getNString(java.lang.String)
+ */
+
+ public String getNString(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getObject(int)
+ */
+
+ public Object getObject(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getObject(java.lang.String)
+ */
+
+ public Object getObject(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getObject(int, java.util.Map)
+ */
+
+ public Object getObject(int i, Map<String, Class<?>> map) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getObject(java.lang.String, java.util.Map)
+ */
+
+ public Object getObject(String parameterName, Map<String, Class<?>> map) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getRef(int)
+ */
+
+ public Ref getRef(int i) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getRef(java.lang.String)
+ */
+
+ public Ref getRef(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getRowId(int)
+ */
+
+ public RowId getRowId(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getRowId(java.lang.String)
+ */
+
+ public RowId getRowId(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getSQLXML(int)
+ */
+
+ public SQLXML getSQLXML(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getSQLXML(java.lang.String)
+ */
+
+ public SQLXML getSQLXML(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getShort(int)
+ */
+
+ public short getShort(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getShort(java.lang.String)
+ */
+
+ public short getShort(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getString(int)
+ */
+
+ public String getString(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getString(java.lang.String)
+ */
+
+ public String getString(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getTime(int)
+ */
+
+ public Time getTime(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getTime(java.lang.String)
+ */
+
+ public Time getTime(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getTime(int, java.util.Calendar)
+ */
+
+ public Time getTime(int parameterIndex, Calendar cal) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getTime(java.lang.String,
+ * java.util.Calendar)
+ */
+
+ public Time getTime(String parameterName, Calendar cal) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getTimestamp(int)
+ */
+
+ public Timestamp getTimestamp(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getTimestamp(java.lang.String)
+ */
+
+ public Timestamp getTimestamp(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getTimestamp(int, java.util.Calendar)
+ */
+
+ public Timestamp getTimestamp(int parameterIndex, Calendar cal) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getTimestamp(java.lang.String,
+ * java.util.Calendar)
+ */
+
+ public Timestamp getTimestamp(String parameterName, Calendar cal) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getURL(int)
+ */
+
+ public URL getURL(int parameterIndex) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#getURL(java.lang.String)
+ */
+
+ public URL getURL(String parameterName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#registerOutParameter(int, int)
+ */
+
+ public void registerOutParameter(int parameterIndex, int sqlType) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#registerOutParameter(java.lang.String, int)
+ */
+
+ public void registerOutParameter(String parameterName, int sqlType) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#registerOutParameter(int, int, int)
+ */
+
+ public void registerOutParameter(int parameterIndex, int sqlType, int scale)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#registerOutParameter(int, int,
+ * java.lang.String)
+ */
+
+ public void registerOutParameter(int paramIndex, int sqlType, String typeName)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#registerOutParameter(java.lang.String, int,
+ * int)
+ */
+
+ public void registerOutParameter(String parameterName, int sqlType, int scale)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#registerOutParameter(java.lang.String, int,
+ * java.lang.String)
+ */
+
+ public void registerOutParameter(String parameterName, int sqlType,
+ String typeName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setAsciiStream(java.lang.String,
+ * java.io.InputStream)
+ */
+
+ public void setAsciiStream(String parameterName, InputStream x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setAsciiStream(java.lang.String,
+ * java.io.InputStream, int)
+ */
+
+ public void setAsciiStream(String parameterName, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setAsciiStream(java.lang.String,
+ * java.io.InputStream, long)
+ */
+
+ public void setAsciiStream(String parameterName, InputStream x, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBigDecimal(java.lang.String,
+ * java.math.BigDecimal)
+ */
+
+ public void setBigDecimal(String parameterName, BigDecimal x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBinaryStream(java.lang.String,
+ * java.io.InputStream)
+ */
+
+ public void setBinaryStream(String parameterName, InputStream x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBinaryStream(java.lang.String,
+ * java.io.InputStream, int)
+ */
+
+ public void setBinaryStream(String parameterName, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBinaryStream(java.lang.String,
+ * java.io.InputStream, long)
+ */
+
+ public void setBinaryStream(String parameterName, InputStream x, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBlob(java.lang.String, java.sql.Blob)
+ */
+
+ public void setBlob(String parameterName, Blob x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBlob(java.lang.String,
+ * java.io.InputStream)
+ */
+
+ public void setBlob(String parameterName, InputStream inputStream)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBlob(java.lang.String,
+ * java.io.InputStream, long)
+ */
+
+ public void setBlob(String parameterName, InputStream inputStream, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBoolean(java.lang.String, boolean)
+ */
+
+ public void setBoolean(String parameterName, boolean x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setByte(java.lang.String, byte)
+ */
+
+ public void setByte(String parameterName, byte x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setBytes(java.lang.String, byte[])
+ */
+
+ public void setBytes(String parameterName, byte[] x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setCharacterStream(java.lang.String,
+ * java.io.Reader)
+ */
+
+ public void setCharacterStream(String parameterName, Reader reader)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setCharacterStream(java.lang.String,
+ * java.io.Reader, int)
+ */
+
+ public void setCharacterStream(String parameterName, Reader reader, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setCharacterStream(java.lang.String,
+ * java.io.Reader, long)
+ */
+
+ public void setCharacterStream(String parameterName, Reader reader,
+ long length) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setClob(java.lang.String, java.sql.Clob)
+ */
+
+ public void setClob(String parameterName, Clob x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setClob(java.lang.String, java.io.Reader)
+ */
+
+ public void setClob(String parameterName, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setClob(java.lang.String, java.io.Reader,
+ * long)
+ */
+
+ public void setClob(String parameterName, Reader reader, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setDate(java.lang.String, java.sql.Date)
+ */
+
+ public void setDate(String parameterName, Date x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setDate(java.lang.String, java.sql.Date,
+ * java.util.Calendar)
+ */
+
+ public void setDate(String parameterName, Date x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setDouble(java.lang.String, double)
+ */
+
+ public void setDouble(String parameterName, double x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setFloat(java.lang.String, float)
+ */
+
+ public void setFloat(String parameterName, float x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setInt(java.lang.String, int)
+ */
+
+ public void setInt(String parameterName, int x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setLong(java.lang.String, long)
+ */
+
+ public void setLong(String parameterName, long x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setNCharacterStream(java.lang.String,
+ * java.io.Reader)
+ */
+
+ public void setNCharacterStream(String parameterName, Reader value)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setNCharacterStream(java.lang.String,
+ * java.io.Reader, long)
+ */
+
+ public void setNCharacterStream(String parameterName, Reader value,
+ long length) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setNClob(java.lang.String, java.sql.NClob)
+ */
+
+ public void setNClob(String parameterName, NClob value) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setNClob(java.lang.String, java.io.Reader)
+ */
+
+ public void setNClob(String parameterName, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setNClob(java.lang.String, java.io.Reader,
+ * long)
+ */
+
+ public void setNClob(String parameterName, Reader reader, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setNString(java.lang.String,
+ * java.lang.String)
+ */
+
+ public void setNString(String parameterName, String value)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setNull(java.lang.String, int)
+ */
+
+ public void setNull(String parameterName, int sqlType) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setNull(java.lang.String, int,
+ * java.lang.String)
+ */
+
+ public void setNull(String parameterName, int sqlType, String typeName)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setObject(java.lang.String,
+ * java.lang.Object)
+ */
+
+ public void setObject(String parameterName, Object x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setObject(java.lang.String,
+ * java.lang.Object, int)
+ */
+
+ public void setObject(String parameterName, Object x, int targetSqlType)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setObject(java.lang.String,
+ * java.lang.Object, int, int)
+ */
+
+ public void setObject(String parameterName, Object x, int targetSqlType,
+ int scale) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setRowId(java.lang.String, java.sql.RowId)
+ */
+
+ public void setRowId(String parameterName, RowId x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setSQLXML(java.lang.String,
+ * java.sql.SQLXML)
+ */
+
+ public void setSQLXML(String parameterName, SQLXML xmlObject)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setShort(java.lang.String, short)
+ */
+
+ public void setShort(String parameterName, short x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setString(java.lang.String,
+ * java.lang.String)
+ */
+
+ public void setString(String parameterName, String x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setTime(java.lang.String, java.sql.Time)
+ */
+
+ public void setTime(String parameterName, Time x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setTime(java.lang.String, java.sql.Time,
+ * java.util.Calendar)
+ */
+
+ public void setTime(String parameterName, Time x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setTimestamp(java.lang.String,
+ * java.sql.Timestamp)
+ */
+
+ public void setTimestamp(String parameterName, Timestamp x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setTimestamp(java.lang.String,
+ * java.sql.Timestamp, java.util.Calendar)
+ */
+
+ public void setTimestamp(String parameterName, Timestamp x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#setURL(java.lang.String, java.net.URL)
+ */
+
+ public void setURL(String parameterName, URL val) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.CallableStatement#wasNull()
+ */
+
+ public boolean wasNull() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#addBatch()
+ */
+
+ public void addBatch() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#clearParameters()
+ */
+
+ public void clearParameters() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#execute()
+ */
+
+ public boolean execute() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#executeQuery()
+ */
+
+ public ResultSet executeQuery() throws SQLException {
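+ // Unlike the surrounding stubs, this returns an empty result set rather
+ // than throwing; presumably a placeholder until prepared-statement
+ // execution is implemented.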
+ return new HiveQueryResultSet.Builder().build();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#executeUpdate()
+ */
+
+ public int executeUpdate() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#getMetaData()
+ */
+
+ public ResultSetMetaData getMetaData() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#getParameterMetaData()
+ */
+
+ public ParameterMetaData getParameterMetaData() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setArray(int, java.sql.Array)
+ */
+
+ public void setArray(int i, Array x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setAsciiStream(int, java.io.InputStream)
+ */
+
+ public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setAsciiStream(int, java.io.InputStream,
+ * int)
+ */
+
+ public void setAsciiStream(int parameterIndex, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setAsciiStream(int, java.io.InputStream,
+ * long)
+ */
+
+ public void setAsciiStream(int parameterIndex, InputStream x, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBigDecimal(int, java.math.BigDecimal)
+ */
+
+ public void setBigDecimal(int parameterIndex, BigDecimal x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBinaryStream(int, java.io.InputStream)
+ */
+
+ public void setBinaryStream(int parameterIndex, InputStream x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBinaryStream(int, java.io.InputStream,
+ * int)
+ */
+
+ public void setBinaryStream(int parameterIndex, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBinaryStream(int, java.io.InputStream,
+ * long)
+ */
+
+ public void setBinaryStream(int parameterIndex, InputStream x, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBlob(int, java.sql.Blob)
+ */
+
+ public void setBlob(int i, Blob x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBlob(int, java.io.InputStream)
+ */
+
+ public void setBlob(int parameterIndex, InputStream inputStream)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBlob(int, java.io.InputStream, long)
+ */
+
+ public void setBlob(int parameterIndex, InputStream inputStream, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBoolean(int, boolean)
+ */
+
+ public void setBoolean(int parameterIndex, boolean x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setByte(int, byte)
+ */
+
+ public void setByte(int parameterIndex, byte x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBytes(int, byte[])
+ */
+
+ public void setBytes(int parameterIndex, byte[] x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setCharacterStream(int, java.io.Reader)
+ */
+
+ public void setCharacterStream(int parameterIndex, Reader reader)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setCharacterStream(int, java.io.Reader,
+ * int)
+ */
+
+ public void setCharacterStream(int parameterIndex, Reader reader, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setCharacterStream(int, java.io.Reader,
+ * long)
+ */
+
+ public void setCharacterStream(int parameterIndex, Reader reader, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setClob(int, java.sql.Clob)
+ */
+
+ public void setClob(int i, Clob x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setClob(int, java.io.Reader)
+ */
+
+ public void setClob(int parameterIndex, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setClob(int, java.io.Reader, long)
+ */
+
+ public void setClob(int parameterIndex, Reader reader, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setDate(int, java.sql.Date)
+ */
+
+ public void setDate(int parameterIndex, Date x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setDate(int, java.sql.Date,
+ * java.util.Calendar)
+ */
+
+ public void setDate(int parameterIndex, Date x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setDouble(int, double)
+ */
+
+ public void setDouble(int parameterIndex, double x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setFloat(int, float)
+ */
+
+ public void setFloat(int parameterIndex, float x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setInt(int, int)
+ */
+
+ public void setInt(int parameterIndex, int x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setLong(int, long)
+ */
+
+ public void setLong(int parameterIndex, long x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNCharacterStream(int, java.io.Reader)
+ */
+
+ public void setNCharacterStream(int parameterIndex, Reader value)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNCharacterStream(int, java.io.Reader,
+ * long)
+ */
+
+ public void setNCharacterStream(int parameterIndex, Reader value, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNClob(int, java.sql.NClob)
+ */
+
+ public void setNClob(int parameterIndex, NClob value) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNClob(int, java.io.Reader)
+ */
+
+ public void setNClob(int parameterIndex, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNClob(int, java.io.Reader, long)
+ */
+
+ public void setNClob(int parameterIndex, Reader reader, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNString(int, java.lang.String)
+ */
+
+ public void setNString(int parameterIndex, String value) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNull(int, int)
+ */
+
+ public void setNull(int parameterIndex, int sqlType) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNull(int, int, java.lang.String)
+ */
+
+ public void setNull(int paramIndex, int sqlType, String typeName)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setObject(int, java.lang.Object)
+ */
+
+ public void setObject(int parameterIndex, Object x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setObject(int, java.lang.Object, int)
+ */
+
+ public void setObject(int parameterIndex, Object x, int targetSqlType)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setObject(int, java.lang.Object, int, int)
+ */
+
+ public void setObject(int parameterIndex, Object x, int targetSqlType,
+ int scale) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setRef(int, java.sql.Ref)
+ */
+
+ public void setRef(int i, Ref x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setRowId(int, java.sql.RowId)
+ */
+
+ public void setRowId(int parameterIndex, RowId x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setSQLXML(int, java.sql.SQLXML)
+ */
+
+ public void setSQLXML(int parameterIndex, SQLXML xmlObject)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setShort(int, short)
+ */
+
+ public void setShort(int parameterIndex, short x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setString(int, java.lang.String)
+ */
+
+ public void setString(int parameterIndex, String x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setTime(int, java.sql.Time)
+ */
+
+ public void setTime(int parameterIndex, Time x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setTime(int, java.sql.Time,
+ * java.util.Calendar)
+ */
+
+ public void setTime(int parameterIndex, Time x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setTimestamp(int, java.sql.Timestamp)
+ */
+
+ public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setTimestamp(int, java.sql.Timestamp,
+ * java.util.Calendar)
+ */
+
+ public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setURL(int, java.net.URL)
+ */
+
+ public void setURL(int parameterIndex, URL x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setUnicodeStream(int, java.io.InputStream,
+ * int)
+ */
+
+ public void setUnicodeStream(int parameterIndex, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#addBatch(java.lang.String)
+ */
+
+ public void addBatch(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#cancel()
+ */
+
+ public void cancel() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#clearBatch()
+ */
+
+ public void clearBatch() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#clearWarnings()
+ */
+
+ public void clearWarnings() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#close()
+ */
+
+ public void close() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String)
+ */
+
+ public boolean execute(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, int)
+ */
+
+ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, int[])
+ */
+
+ public boolean execute(String sql, int[] columnIndexes) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, java.lang.String[])
+ */
+
+ public boolean execute(String sql, String[] columnNames) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeBatch()
+ */
+
+ public int[] executeBatch() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeQuery(java.lang.String)
+ */
+
+ public ResultSet executeQuery(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String)
+ */
+
+ public int executeUpdate(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, int)
+ */
+
+ public int executeUpdate(String sql, int autoGeneratedKeys)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, int[])
+ */
+
+ public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, java.lang.String[])
+ */
+
+ public int executeUpdate(String sql, String[] columnNames)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getConnection()
+ */
+
+ public Connection getConnection() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getFetchDirection()
+ */
+
+ public int getFetchDirection() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getFetchSize()
+ */
+
+ public int getFetchSize() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getGeneratedKeys()
+ */
+
+ public ResultSet getGeneratedKeys() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMaxFieldSize()
+ */
+
+ public int getMaxFieldSize() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMaxRows()
+ */
+
+ public int getMaxRows() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMoreResults()
+ */
+
+ public boolean getMoreResults() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMoreResults(int)
+ */
+
+ public boolean getMoreResults(int current) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getQueryTimeout()
+ */
+
+ public int getQueryTimeout() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSet()
+ */
+
+ public ResultSet getResultSet() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetConcurrency()
+ */
+
+ public int getResultSetConcurrency() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetHoldability()
+ */
+
+ public int getResultSetHoldability() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetType()
+ */
+
+ public int getResultSetType() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getUpdateCount()
+ */
+
+ public int getUpdateCount() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getWarnings()
+ */
+
+ public SQLWarning getWarnings() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#isClosed()
+ */
+
+ public boolean isClosed() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#isPoolable()
+ */
+
+ public boolean isPoolable() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setCursorName(java.lang.String)
+ */
+
+ public void setCursorName(String name) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setEscapeProcessing(boolean)
+ */
+
+ public void setEscapeProcessing(boolean enable) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setFetchDirection(int)
+ */
+
+ public void setFetchDirection(int direction) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setFetchSize(int)
+ */
+
+ public void setFetchSize(int rows) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setMaxFieldSize(int)
+ */
+
+ public void setMaxFieldSize(int max) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setMaxRows(int)
+ */
+
+ public void setMaxRows(int max) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setPoolable(boolean)
+ */
+
+ public void setPoolable(boolean poolable) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setQueryTimeout(int)
+ */
+
+ public void setQueryTimeout(int seconds) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#isWrapperFor(java.lang.Class)
+ */
+
+ public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#unwrap(java.lang.Class)
+ */
+
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+}
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
new file mode 100644
index 0000000..17b4d39
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -0,0 +1,743 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.CallableStatement;
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.NClob;
+import java.sql.PreparedStatement;
+import java.sql.SQLClientInfoException;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Savepoint;
+import java.sql.Statement;
+import java.sql.Struct;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+
+import javax.security.sasl.SaslException;
+
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.auth.KerberosSaslHelper;
+import org.apache.hive.service.auth.PlainSaslHelper;
+import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCloseSessionReq;
+import org.apache.hive.service.cli.thrift.TOpenSessionReq;
+import org.apache.hive.service.cli.thrift.TOpenSessionResp;
+import org.apache.hive.service.cli.thrift.TProtocolVersion;
+import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
+/**
+ * HiveConnection.
+ *
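+ * A minimal usage sketch (host, port, and credentials are hypothetical;
+ * the jdbc:hive2 URL scheme is assumed to be registered by the
+ * accompanying HiveDriver):
+ *
+ * <pre>
+ * Connection conn = DriverManager.getConnection(
+ *     "jdbc:hive2://localhost:10000/default", "user", "password");
+ * Statement stmt = conn.createStatement();
+ * ResultSet rs = stmt.executeQuery("show tables");
+ * </pre>
+ *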
+ */
+public class HiveConnection implements java.sql.Connection {
+ private static final String HIVE_AUTH_TYPE = "auth";
+ private static final String HIVE_AUTH_SIMPLE = "noSasl";
+ private static final String HIVE_AUTH_USER = "user";
+ private static final String HIVE_AUTH_PRINCIPAL = "principal";
+ private static final String HIVE_AUTH_PASSWD = "password";
+ private static final String HIVE_ANONYMOUS_USER = "anonymous";
+ private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
+
+ private TTransport transport;
+ private TCLIService.Iface client;
+ private boolean isClosed = true;
+ private SQLWarning warningChain = null;
+ private TSessionHandle sessHandle = null;
+ private final List<TProtocolVersion> supportedProtocols = new LinkedList<TProtocolVersion>();
+ /**
+ * TODO: parse the URI (consider using java.net.URI).
+ */
+ public HiveConnection(String uri, Properties info) throws SQLException {
+ Utils.JdbcConnectionParams connParams = Utils.parseURL(uri);
+ if (connParams.isEmbeddedMode()) {
+ client = new EmbeddedThriftCLIService();
+ } else {
+ // extract user/password from the JDBC connection properties if they are not supplied in the connection URL
+ if (info.containsKey(HIVE_AUTH_USER)) {
+ connParams.getSessionVars().put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
+ if (info.containsKey(HIVE_AUTH_PASSWD)) {
+ connParams.getSessionVars().put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
+ }
+ }
+
+ openTransport(uri, connParams.getHost(), connParams.getPort(), connParams.getSessionVars());
+ }
+
+ // currently only V1 is supported
+ supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1);
+
+ // open client session
+ openSession(uri);
+
+ configureConnection(connParams);
+ }
+
+ private void configureConnection(Utils.JdbcConnectionParams connParams)
+ throws SQLException {
+ // set the Hive variables in session state for embedded mode
+ if (connParams.isEmbeddedMode()) {
+ if (!connParams.getHiveVars().isEmpty()) {
+ SessionState.get().setHiveVariables(connParams.getHiveVars());
+ }
+ } else {
+ // for a remote JDBC client, set each conf var on the server via 'set foo=bar'
+ Statement stmt = createStatement();
+ for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
+ stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
+ }
+ stmt.close();
+ }
+ }
+
+ private void openTransport(String uri, String host, int port, Map<String, String> sessConf)
+ throws SQLException {
+ transport = new TSocket(host, port);
+
+ // handle secure connection if specified
+ if (!sessConf.containsKey(HIVE_AUTH_TYPE)
+ || !sessConf.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) {
+ try {
+ if (sessConf.containsKey(HIVE_AUTH_PRINCIPAL)) {
+ transport = KerberosSaslHelper.getKerberosTransport(
+ sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport);
+ } else {
+ String userName = sessConf.get(HIVE_AUTH_USER);
+ if ((userName == null) || userName.isEmpty()) {
+ userName = HIVE_ANONYMOUS_USER;
+ }
+ String passwd = sessConf.get(HIVE_AUTH_PASSWD);
+ if ((passwd == null) || passwd.isEmpty()) {
+ passwd = HIVE_ANONYMOUS_PASSWD;
+ }
+ transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
+ }
+ } catch (SaslException e) {
+ throw new SQLException("Could not establish secure connection to "
+ + uri + ": " + e.getMessage(), " 08S01");
+ }
+ }
+
+ TProtocol protocol = new TBinaryProtocol(transport);
+ client = new TCLIService.Client(protocol);
+ try {
+ transport.open();
+ } catch (TTransportException e) {
+ throw new SQLException("Could not establish connection to "
+ + uri + ": " + e.getMessage(), "08S01", e);
+ }
+ }
+
+ private void openSession(String uri) throws SQLException {
+ TOpenSessionReq openReq = new TOpenSessionReq();
+
+ // set the session configuration
+ // openReq.setConfiguration(null);
+
+ try {
+ TOpenSessionResp openResp = client.OpenSession(openReq);
+
+ // validate connection
+ Utils.verifySuccess(openResp.getStatus());
+ if (!supportedProtocols.contains(openResp.getServerProtocolVersion())) {
+ throw new TException("Unsupported Hive2 protocol");
+ }
+ sessHandle = openResp.getSessionHandle();
+ } catch (TException e) {
+ throw new SQLException("Could not establish connection to "
+ + uri + ": " + e.getMessage(), "08S01", e);
+ }
+ isClosed = false;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#clearWarnings()
+ */
+
+ public void clearWarnings() throws SQLException {
+ warningChain = null;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#close()
+ */
+
+ public void close() throws SQLException {
+ if (!isClosed) {
+ TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle);
+ try {
+ client.CloseSession(closeReq);
+ } catch (TException e) {
+ throw new SQLException("Error while cleaning up the server resources", e);
+ } finally {
+ isClosed = true;
+ if (transport != null) {
+ transport.close();
+ }
+ }
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#commit()
+ */
+
+ public void commit() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#createArrayOf(java.lang.String,
+ * java.lang.Object[])
+ */
+
+ public Array createArrayOf(String arg0, Object[] arg1) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#createBlob()
+ */
+
+ public Blob createBlob() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#createClob()
+ */
+
+ public Clob createClob() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#createNClob()
+ */
+
+ public NClob createNClob() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#createSQLXML()
+ */
+
+ public SQLXML createSQLXML() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /**
+ * Creates a Statement object for sending SQL statements to the database.
+ *
+ * @throws SQLException
+ * if a database access error occurs.
+ * @see java.sql.Connection#createStatement()
+ */
+
+ public Statement createStatement() throws SQLException {
+ if (isClosed) {
+ throw new SQLException("Can't create Statement, connection is closed");
+ }
+ return new HiveStatement(client, sessHandle);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#createStatement(int, int)
+ */
+
+ public Statement createStatement(int resultSetType, int resultSetConcurrency)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#createStatement(int, int, int)
+ */
+
+ public Statement createStatement(int resultSetType, int resultSetConcurrency,
+ int resultSetHoldability) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#createStruct(java.lang.String, java.lang.Object[])
+ */
+
+ public Struct createStruct(String typeName, Object[] attributes)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getAutoCommit()
+ */
+
+ public boolean getAutoCommit() throws SQLException {
+ return true;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getCatalog()
+ */
+
+ public String getCatalog() throws SQLException {
+ return "";
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getClientInfo()
+ */
+
+ public Properties getClientInfo() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getClientInfo(java.lang.String)
+ */
+
+ public String getClientInfo(String name) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getHoldability()
+ */
+
+ public int getHoldability() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getMetaData()
+ */
+
+ public DatabaseMetaData getMetaData() throws SQLException {
+ return new HiveDatabaseMetaData(client, sessHandle);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getTransactionIsolation()
+ */
+
+ public int getTransactionIsolation() throws SQLException {
+ return Connection.TRANSACTION_NONE;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getTypeMap()
+ */
+
+ public Map<String, Class<?>> getTypeMap() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#getWarnings()
+ */
+
+ public SQLWarning getWarnings() throws SQLException {
+ return warningChain;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#isClosed()
+ */
+
+ public boolean isClosed() throws SQLException {
+ return isClosed;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#isReadOnly()
+ */
+
+ public boolean isReadOnly() throws SQLException {
+ return false;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#isValid(int)
+ */
+
+ public boolean isValid(int timeout) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#nativeSQL(java.lang.String)
+ */
+
+ public String nativeSQL(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareCall(java.lang.String)
+ */
+
+ public CallableStatement prepareCall(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareCall(java.lang.String, int, int)
+ */
+
+ public CallableStatement prepareCall(String sql, int resultSetType,
+ int resultSetConcurrency) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareCall(java.lang.String, int, int, int)
+ */
+
+ public CallableStatement prepareCall(String sql, int resultSetType,
+ int resultSetConcurrency, int resultSetHoldability) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareStatement(java.lang.String)
+ */
+
+ public PreparedStatement prepareStatement(String sql) throws SQLException {
+ return new HivePreparedStatement(client, sessHandle, sql);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareStatement(java.lang.String, int)
+ */
+
+ public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
+ throws SQLException {
+ return new HivePreparedStatement(client, sessHandle, sql);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareStatement(java.lang.String, int[])
+ */
+
+ public PreparedStatement prepareStatement(String sql, int[] columnIndexes)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareStatement(java.lang.String,
+ * java.lang.String[])
+ */
+
+ public PreparedStatement prepareStatement(String sql, String[] columnNames)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareStatement(java.lang.String, int, int)
+ */
+
+ public PreparedStatement prepareStatement(String sql, int resultSetType,
+ int resultSetConcurrency) throws SQLException {
+ return new HivePreparedStatement(client, sessHandle, sql);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#prepareStatement(java.lang.String, int, int, int)
+ */
+
+ public PreparedStatement prepareStatement(String sql, int resultSetType,
+ int resultSetConcurrency, int resultSetHoldability) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#releaseSavepoint(java.sql.Savepoint)
+ */
+
+ public void releaseSavepoint(Savepoint savepoint) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#rollback()
+ */
+
+ public void rollback() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#rollback(java.sql.Savepoint)
+ */
+
+ public void rollback(Savepoint savepoint) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setAutoCommit(boolean)
+ */
+
+ public void setAutoCommit(boolean autoCommit) throws SQLException {
+ if (autoCommit) {
+ throw new SQLException("enabling autocommit is not supported");
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setCatalog(java.lang.String)
+ */
+
+ public void setCatalog(String catalog) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setClientInfo(java.util.Properties)
+ */
+
+ public void setClientInfo(Properties properties)
+ throws SQLClientInfoException {
+ // TODO Auto-generated method stub
+ throw new SQLClientInfoException("Method not supported", null);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setClientInfo(java.lang.String, java.lang.String)
+ */
+
+ public void setClientInfo(String name, String value)
+ throws SQLClientInfoException {
+ // TODO Auto-generated method stub
+ throw new SQLClientInfoException("Method not supported", null);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setHoldability(int)
+ */
+
+ public void setHoldability(int holdability) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setReadOnly(boolean)
+ */
+
+ public void setReadOnly(boolean readOnly) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setSavepoint()
+ */
+
+ public Savepoint setSavepoint() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setSavepoint(java.lang.String)
+ */
+
+ public Savepoint setSavepoint(String name) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setTransactionIsolation(int)
+ */
+
+ public void setTransactionIsolation(int level) throws SQLException {
+ // TODO: throw an exception?
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Connection#setTypeMap(java.util.Map)
+ */
+
+ public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#isWrapperFor(java.lang.Class)
+ */
+
+ public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#unwrap(java.lang.Class)
+ */
+
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+}
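Reviewer note: a minimal smoke test for the new connection path, assuming a HiveServer2 instance on localhost:10000 and the patched JDBC jar on the classpath (the query and database name are illustrative):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveJdbcSmokeTest {
      public static void main(String[] args) throws Exception {
        // Loading the class runs HiveDriver's static registration block.
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // With SASL enabled and no credentials given, HiveConnection falls
        // back to the anonymous/anonymous defaults defined above anyway.
        Connection conn = DriverManager.getConnection(
            "jdbc:hive2://localhost:10000/default", "anonymous", "anonymous");
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("SHOW TABLES");
        while (rs.next()) {
          System.out.println(rs.getString(1));
        }
        stmt.close();
        conn.close();
      }
    }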
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java
new file mode 100644
index 0000000..1053485
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.io.PrintWriter;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import javax.sql.DataSource;
+
+/**
+ * HiveDataSource.
+ *
+ */
+public class HiveDataSource implements DataSource {
+
+ /**
+ *
+ */
+ public HiveDataSource() {
+ // TODO Auto-generated constructor stub
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see javax.sql.DataSource#getConnection()
+ */
+
+ public Connection getConnection() throws SQLException {
+ return getConnection("", "");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see javax.sql.DataSource#getConnection(java.lang.String, java.lang.String)
+ */
+
+ public Connection getConnection(String username, String password)
+ throws SQLException {
+ try {
+ return new HiveConnection("", null);
+ } catch (Exception ex) {
+ throw new SQLException("Error in getting HiveConnection", ex);
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see javax.sql.CommonDataSource#getLogWriter()
+ */
+
+ public PrintWriter getLogWriter() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see javax.sql.CommonDataSource#getLoginTimeout()
+ */
+
+ public int getLoginTimeout() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see javax.sql.CommonDataSource#setLogWriter(java.io.PrintWriter)
+ */
+
+ public void setLogWriter(PrintWriter arg0) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see javax.sql.CommonDataSource#setLoginTimeout(int)
+ */
+
+ public void setLoginTimeout(int arg0) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#isWrapperFor(java.lang.Class)
+ */
+
+ public boolean isWrapperFor(Class<?> arg0) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#unwrap(java.lang.Class)
+ */
+
+ public <T> T unwrap(Class<T> arg0) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+}
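Reviewer note: as written, HiveDataSource ignores the username/password arguments and hands HiveConnection an empty URL, which Utils.parseURL presumably resolves to embedded mode; property-based configuration is not wired up yet. A sketch of the resulting usage:

    import java.sql.Connection;
    import javax.sql.DataSource;

    DataSource ds = new org.apache.hive.jdbc.HiveDataSource();
    Connection conn = ds.getConnection("ignored", "ignored");  // credentials are currently unused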
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
new file mode 100644
index 0000000..3d1a969
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
@@ -0,0 +1,1097 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.RowIdLifetime;
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.jar.Attributes;
+
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hive.service.cli.thrift.TGetCatalogsReq;
+import org.apache.hive.service.cli.thrift.TGetCatalogsResp;
+import org.apache.hive.service.cli.thrift.TGetColumnsReq;
+import org.apache.hive.service.cli.thrift.TGetColumnsResp;
+import org.apache.hive.service.cli.thrift.TGetFunctionsReq;
+import org.apache.hive.service.cli.thrift.TGetFunctionsResp;
+import org.apache.hive.service.cli.thrift.TGetSchemasReq;
+import org.apache.hive.service.cli.thrift.TGetSchemasResp;
+import org.apache.hive.service.cli.thrift.TGetTableTypesReq;
+import org.apache.hive.service.cli.thrift.TGetTableTypesResp;
+import org.apache.hive.service.cli.thrift.TGetTablesReq;
+import org.apache.hive.service.cli.thrift.TGetTablesResp;
+import org.apache.hive.service.cli.thrift.TGetTypeInfoReq;
+import org.apache.hive.service.cli.thrift.TGetTypeInfoResp;
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.thrift.TException;
+
+/**
+ * HiveDatabaseMetaData.
+ *
+ */
+public class HiveDatabaseMetaData implements DatabaseMetaData {
+
+ private final TCLIService.Iface client;
+ private final TSessionHandle sessHandle;
+ private static final String CATALOG_SEPARATOR = ".";
+
+ private static final char SEARCH_STRING_ESCAPE = '\\';
+
+ // The maximum column length = MFieldSchema.FNAME in metastore/src/model/package.jdo
+ private static final int maxColumnNameLength = 128;
+
+ /**
+ *
+ */
+ public HiveDatabaseMetaData(TCLIService.Iface client, TSessionHandle sessHandle) {
+ this.client = client;
+ this.sessHandle = sessHandle;
+ }
+
+ public boolean allProceduresAreCallable() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean allTablesAreSelectable() throws SQLException {
+ return true;
+ }
+
+ public boolean autoCommitFailureClosesAllResultSets() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean dataDefinitionCausesTransactionCommit() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean dataDefinitionIgnoredInTransactions() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean deletesAreDetected(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean doesMaxRowSizeIncludeBlobs() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getAttributes(String catalog, String schemaPattern,
+ String typeNamePattern, String attributeNamePattern) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getBestRowIdentifier(String catalog, String schema,
+ String table, int scope, boolean nullable) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getCatalogSeparator() throws SQLException {
+ return CATALOG_SEPARATOR;
+ }
+
+ public String getCatalogTerm() throws SQLException {
+ return "instance";
+ }
+
+ public ResultSet getCatalogs() throws SQLException {
+ TGetCatalogsResp catalogResp;
+
+ try {
+ catalogResp = client.GetCatalogs(new TGetCatalogsReq(sessHandle));
+ } catch (TException e) {
+ throw new SQLException(e.getMessage(), "08S01");
+ }
+ Utils.verifySuccess(catalogResp.getStatus());
+
+ return new HiveQueryResultSet.Builder()
+ .setClient(client)
+ .setSessionHandle(sessHandle)
+ .setStmtHandle(catalogResp.getOperationHandle())
+ .build();
+ }
+
+ public ResultSet getClientInfoProperties() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getColumnPrivileges(String catalog, String schema,
+ String table, String columnNamePattern) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /**
+ * Convert a pattern containing JDBC catalog search wildcards into
+ * Java regex patterns.
+ *
+ * @param pattern input which may contain '%' or '_' wildcard characters, or
+ * these characters escaped using {@link #getSearchStringEscape()}.
+ * @return the pattern as a Java regex: '%' becomes ".*", '_' becomes '.',
+ * and escaped wildcards are kept as literal characters.
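+ * For example, "db\_1%" becomes "db_1.*" (note that unescaped characters
+ * are also lowercased).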
+ */
+ private String convertPattern(final String pattern) {
+ if (pattern==null) {
+ return ".*";
+ } else {
+ StringBuilder result = new StringBuilder(pattern.length());
+
+ boolean escaped = false;
+ for (int i = 0, len = pattern.length(); i < len; i++) {
+ char c = pattern.charAt(i);
+ if (escaped) {
+ if (c != SEARCH_STRING_ESCAPE) {
+ escaped = false;
+ }
+ result.append(c);
+ } else {
+ if (c == SEARCH_STRING_ESCAPE) {
+ escaped = true;
+ continue;
+ } else if (c == '%') {
+ result.append(".*");
+ } else if (c == '_') {
+ result.append('.');
+ } else {
+ result.append(Character.toLowerCase(c));
+ }
+ }
+ }
+
+ return result.toString();
+ }
+ }
+
+ public ResultSet getColumns(String catalog, String schemaPattern,
+ String tableNamePattern, String columnNamePattern) throws SQLException {
+ TGetColumnsResp colResp;
+ TGetColumnsReq colReq = new TGetColumnsReq();
+ colReq.setSessionHandle(sessHandle);
+ colReq.setCatalogName(catalog);
+ colReq.setSchemaName(schemaPattern);
+ colReq.setTableName(tableNamePattern);
+ colReq.setColumnName(columnNamePattern);
+ try {
+ colResp = client.GetColumns(colReq);
+ } catch (TException e) {
+ throw new SQLException(e.getMessage(), "08S01");
+ }
+ Utils.verifySuccess(colResp.getStatus());
+ // build the resultset from response
+ return new HiveQueryResultSet.Builder()
+ .setClient(client)
+ .setSessionHandle(sessHandle)
+ .setStmtHandle(colResp.getOperationHandle())
+ .build();
+ }
+
+ /**
+ * We sort the output of getColumns to guarantee jdbc compliance.
+ * First check by table name then by ordinal position
+ */
+ private class GetColumnsComparator implements Comparator<JdbcColumn> {
+
+ public int compare(JdbcColumn o1, JdbcColumn o2) {
+ int compareName = o1.getTableName().compareTo(o2.getTableName());
+ if (compareName==0) {
+ if (o1.getOrdinalPos() > o2.getOrdinalPos()) {
+ return 1;
+ } else if (o1.getOrdinalPos() < o2.getOrdinalPos()) {
+ return -1;
+ }
+ return 0;
+ } else {
+ return compareName;
+ }
+ }
+ }
+
+ public Connection getConnection() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getCrossReference(String primaryCatalog,
+ String primarySchema, String primaryTable, String foreignCatalog,
+ String foreignSchema, String foreignTable) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getDatabaseMajorVersion() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getDatabaseMinorVersion() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getDatabaseProductName() throws SQLException {
+ return "Hive";
+ }
+
+ public String getDatabaseProductVersion() throws SQLException {
+ // TODO: Fetch this from the server side
+ return "0.10.0";
+ }
+
+ public int getDefaultTransactionIsolation() throws SQLException {
+ return Connection.TRANSACTION_NONE;
+ }
+
+ public int getDriverMajorVersion() {
+ return HiveDriver.getMajorDriverVersion();
+ }
+
+ public int getDriverMinorVersion() {
+ return HiveDriver.getMinorDriverVersion();
+ }
+
+ public String getDriverName() throws SQLException {
+ return HiveDriver.fetchManifestAttribute(Attributes.Name.IMPLEMENTATION_TITLE);
+ }
+
+ public String getDriverVersion() throws SQLException {
+ return HiveDriver.fetchManifestAttribute(Attributes.Name.IMPLEMENTATION_VERSION);
+ }
+
+ public ResultSet getExportedKeys(String catalog, String schema, String table)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getExtraNameCharacters() throws SQLException {
+ // TODO: verify that this is correct
+ return "";
+ }
+
+ public ResultSet getFunctionColumns(String arg0, String arg1, String arg2,
+ String arg3) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getFunctions(String catalogName, String schemaPattern, String functionNamePattern)
+ throws SQLException {
+ TGetFunctionsResp funcResp;
+ TGetFunctionsReq getFunctionsReq = new TGetFunctionsReq();
+ getFunctionsReq.setSessionHandle(sessHandle);
+ getFunctionsReq.setCatalogName(catalogName);
+ getFunctionsReq.setSchemaName(schemaPattern);
+ getFunctionsReq.setFunctionName(functionNamePattern);
+
+ try {
+ funcResp = client.GetFunctions(getFunctionsReq);
+ } catch (TException e) {
+ throw new SQLException(e.getMessage(), "08S01");
+ }
+ Utils.verifySuccess(funcResp.getStatus());
+
+ return new HiveQueryResultSet.Builder()
+ .setClient(client)
+ .setSessionHandle(sessHandle)
+ .setStmtHandle(funcResp.getOperationHandle())
+ .build();
+ }
+
+ public String getIdentifierQuoteString() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getImportedKeys(String catalog, String schema, String table)
+ throws SQLException {
+ return new HiveQueryResultSet.Builder()
+ .setClient(client)
+ .setEmptyResultSet(true)
+ .setSchema(
+ Arrays.asList(
+ "PKTABLE_CAT",
+ "PKTABLE_SCHEM",
+ "PKTABLE_NAME",
+ "PKCOLUMN_NAME",
+ "FKTABLE_CAT",
+ "FKTABLE_SCHEM",
+ "FKTABLE_NAME",
+ "FKCOLUMN_NAME",
+ "KEY_SEQ",
+ "UPDATE_RULE",
+ "DELETE_RULE",
+ "FK_NAME",
+ "PK_NAME",
+ "DEFERRABILITY"),
+ Arrays.asList(
+ "STRING",
+ "STRING",
+ "STRING",
+ "STRING",
+ "STRING",
+ "STRING",
+ "STRING",
+ "STRING",
+ "SMALLINT",
+ "SMALLINT",
+ "SMALLINT",
+ "STRING",
+ "STRING",
+ "STRING"))
+ .build();
+ }
+
+ public ResultSet getIndexInfo(String catalog, String schema, String table,
+ boolean unique, boolean approximate) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getJDBCMajorVersion() throws SQLException {
+ return 3;
+ }
+
+ public int getJDBCMinorVersion() throws SQLException {
+ return 0;
+ }
+
+ public int getMaxBinaryLiteralLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxCatalogNameLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxCharLiteralLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /**
+ * Returns the value of maxColumnNameLength.
+ *
+ */
+ public int getMaxColumnNameLength() throws SQLException {
+ return maxColumnNameLength;
+ }
+
+ public int getMaxColumnsInGroupBy() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxColumnsInIndex() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxColumnsInOrderBy() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxColumnsInSelect() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxColumnsInTable() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxConnections() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxCursorNameLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxIndexLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxProcedureNameLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxRowSize() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxSchemaNameLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxStatementLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxStatements() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxTableNameLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxTablesInSelect() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getMaxUserNameLength() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getNumericFunctions() throws SQLException {
+ return "";
+ }
+
+ public ResultSet getPrimaryKeys(String catalog, String schema, String table)
+ throws SQLException {
+ // Hive doesn't support primary keys
+ // using local schema with empty resultset
+ return new HiveQueryResultSet.Builder().setClient(client).setEmptyResultSet(true).
+ setSchema(Arrays.asList("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "KEY_SEQ", "PK_NAME" ),
+ Arrays.asList("STRING", "STRING", "STRING", "STRING", "INT", "STRING"))
+ .build();
+ }
+
+ public ResultSet getProcedureColumns(String catalog, String schemaPattern,
+ String procedureNamePattern, String columnNamePattern)
+ throws SQLException {
+ // Hive doesn't support procedures
+ // using local schema with empty resultset
+ return new HiveQueryResultSet.Builder().setClient(client).setEmptyResultSet(true).
+ setSchema(
+ Arrays.asList("PROCEDURE_CAT", "PROCEDURE_SCHEM", "PROCEDURE_NAME", "COLUMN_NAME", "COLUMN_TYPE",
+ "DATA_TYPE", "TYPE_NAME", "PRECISION", "LENGTH", "SCALE", "RADIX", "NULLABLE", "REMARKS",
+ "COLUMN_DEF", "SQL_DATA_TYPE", "SQL_DATETIME_SUB", "CHAR_OCTET_LENGTH", "ORDINAL_POSITION",
+ "IS_NULLABLE", "SPECIFIC_NAME"),
+ Arrays.asList("STRING", "STRING", "STRING", "STRING", "SMALLINT", "INT",
+ "STRING", "INT", "INT", "SMALLINT", "SMALLINT", "SMALLINT", "STRING", "STRING",
+ "INT", "INT", "INT", "INT",
+ "STRING", "STRING"))
+ .build();
+ }
+
+ public String getProcedureTerm() throws SQLException {
+ return "UDF";
+ }
+
+ public ResultSet getProcedures(String catalog, String schemaPattern,
+ String procedureNamePattern) throws SQLException {
+ // Hive doesn't support stored procedures
+ // using local schema with empty resultset
+ return new HiveQueryResultSet.Builder().setClient(client).setEmptyResultSet(true).
+ setSchema(
+ Arrays.asList("PROCEDURE_CAT", "PROCEDURE_SCHEM", "PROCEDURE_NAME", "RESERVED", "RESERVED",
+ "RESERVED", "REMARKS", "PROCEDURE_TYPE", "SPECIFIC_NAME"),
+ Arrays.asList("STRING", "STRING", "STRING", "STRING", "STRING",
+ "STRING", "STRING", "SMALLINT", "STRING"))
+ .build();
+ }
+
+ public int getResultSetHoldability() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public RowIdLifetime getRowIdLifetime() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getSQLKeywords() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getSQLStateType() throws SQLException {
+ return DatabaseMetaData.sqlStateSQL99;
+ }
+
+ public String getSchemaTerm() throws SQLException {
+ return "database";
+ }
+
+ public ResultSet getSchemas() throws SQLException {
+ return getSchemas(null, null);
+ }
+
+ public ResultSet getSchemas(String catalog, String schemaPattern)
+ throws SQLException {
+ TGetSchemasResp schemaResp;
+
+ TGetSchemasReq schemaReq = new TGetSchemasReq();
+ schemaReq.setSessionHandle(sessHandle);
+ if (catalog != null) {
+ schemaReq.setCatalogName(catalog);
+ }
+ if (schemaPattern == null) {
+ schemaPattern = "%";
+ }
+ schemaReq.setSchemaName(schemaPattern);
+
+ try {
+ schemaResp = client.GetSchemas(schemaReq);
+ } catch (TException e) {
+ throw new SQLException(e.getMessage(), "08S01");
+ }
+ Utils.verifySuccess(schemaResp.getStatus());
+
+ return new HiveQueryResultSet.Builder()
+ .setClient(client)
+ .setSessionHandle(sessHandle)
+ .setStmtHandle(schemaResp.getOperationHandle())
+ .build();
+ }
+
+ public String getSearchStringEscape() throws SQLException {
+ return String.valueOf(SEARCH_STRING_ESCAPE);
+ }
+
+ public String getStringFunctions() throws SQLException {
+ return "";
+ }
+
+ public ResultSet getSuperTables(String catalog, String schemaPattern,
+ String tableNamePattern) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getSuperTypes(String catalog, String schemaPattern,
+ String typeNamePattern) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getSystemFunctions() throws SQLException {
+ return "";
+ }
+
+ public ResultSet getTablePrivileges(String catalog, String schemaPattern,
+ String tableNamePattern) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getTableTypes() throws SQLException {
+ TGetTableTypesResp tableTypeResp;
+
+ try {
+ tableTypeResp = client.GetTableTypes(new TGetTableTypesReq(sessHandle));
+ } catch (TException e) {
+ throw new SQLException(e.getMessage(), "08S01");
+ }
+ Utils.verifySuccess(tableTypeResp.getStatus());
+
+ return new HiveQueryResultSet.Builder()
+ .setClient(client)
+ .setSessionHandle(sessHandle)
+ .setStmtHandle(tableTypeResp.getOperationHandle())
+ .build();
+ }
+
+ public ResultSet getTables(String catalog, String schemaPattern,
+ String tableNamePattern, String[] types) throws SQLException {
+ TGetTablesResp getTableResp;
+ if (schemaPattern == null) {
+ // if schemaPattern is null it means that the schemaPattern value should not be used to narrow the search
+ schemaPattern = "%";
+ }
+ TGetTablesReq getTableReq = new TGetTablesReq(sessHandle);
+ getTableReq.setTableName(tableNamePattern);
+
+ // TODO: need to set catalog parameter
+
+ if (types != null) {
+ getTableReq.setTableTypes(Arrays.asList(types));
+ }
+ getTableReq.setSchemaName(schemaPattern);
+
+ try {
+ getTableResp = client.GetTables(getTableReq);
+ } catch (TException e) {
+ throw new SQLException(e.getMessage(), "08S01");
+ }
+ Utils.verifySuccess(getTableResp.getStatus());
+
+ return new HiveQueryResultSet.Builder()
+ .setClient(client)
+ .setSessionHandle(sessHandle)
+ .setStmtHandle(getTableResp.getOperationHandle())
+ .build();
+ }
+
+ /**
+ * We sort the output of getTables to guarantee jdbc compliance.
+ * First check by table type then by table name
+ */
+ private class GetTablesComparator implements Comparator<JdbcTable> {
+
+ public int compare(JdbcTable o1, JdbcTable o2) {
+ int compareType = o1.getType().compareTo(o2.getType());
+ if (compareType==0) {
+ return o1.getTableName().compareTo(o2.getTableName());
+ } else {
+ return compareType;
+ }
+ }
+ }
+
+ /**
+ * Translate hive table types into jdbc table types.
+ * @param hivetabletype
+ * @return the type of the table
+ */
+ public static String toJdbcTableType(String hivetabletype) {
+ if (hivetabletype==null) {
+ return null;
+ } else if (hivetabletype.equals(TableType.MANAGED_TABLE.toString())) {
+ return "TABLE";
+ } else if (hivetabletype.equals(TableType.VIRTUAL_VIEW.toString())) {
+ return "VIEW";
+ } else if (hivetabletype.equals(TableType.EXTERNAL_TABLE.toString())) {
+ return "EXTERNAL TABLE";
+ } else {
+ return hivetabletype;
+ }
+ }
+
+ public String getTimeDateFunctions() throws SQLException {
+ return "";
+ }
+
+ public ResultSet getTypeInfo() throws SQLException {
+ TGetTypeInfoResp getTypeInfoResp;
+ TGetTypeInfoReq getTypeInfoReq = new TGetTypeInfoReq();
+ getTypeInfoReq.setSessionHandle(sessHandle);
+ try {
+ getTypeInfoResp = client.GetTypeInfo(getTypeInfoReq);
+ } catch (TException e) {
+ throw new SQLException(e.getMessage(), "08S01");
+ }
+ Utils.verifySuccess(getTypeInfoResp.getStatus());
+ return new HiveQueryResultSet.Builder()
+ .setClient(client)
+ .setSessionHandle(sessHandle)
+ .setStmtHandle(getTypeInfoResp.getOperationHandle())
+ .build();
+ }
+
+ public ResultSet getUDTs(String catalog, String schemaPattern,
+ String typeNamePattern, int[] types) throws SQLException {
+
+ return new HiveMetaDataResultSet(
+ Arrays.asList("TYPE_CAT", "TYPE_SCHEM", "TYPE_NAME", "CLASS_NAME", "DATA_TYPE"
+ , "REMARKS", "BASE_TYPE")
+ , Arrays.asList("STRING", "STRING", "STRING", "STRING", "INT", "STRING", "INT")
+ , null) {
+
+ public boolean next() throws SQLException {
+ return false;
+ }
+ };
+ }
+
+ public String getURL() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getUserName() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public ResultSet getVersionColumns(String catalog, String schema, String table)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean insertsAreDetected(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isCatalogAtStart() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isReadOnly() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean locatorsUpdateCopy() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean nullPlusNonNullIsNull() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean nullsAreSortedAtEnd() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean nullsAreSortedAtStart() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean nullsAreSortedHigh() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean nullsAreSortedLow() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean othersDeletesAreVisible(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean othersInsertsAreVisible(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean othersUpdatesAreVisible(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean ownDeletesAreVisible(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean ownInsertsAreVisible(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean ownUpdatesAreVisible(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean storesLowerCaseIdentifiers() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean storesLowerCaseQuotedIdentifiers() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean storesMixedCaseIdentifiers() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean storesMixedCaseQuotedIdentifiers() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean storesUpperCaseIdentifiers() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean storesUpperCaseQuotedIdentifiers() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsANSI92EntryLevelSQL() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsANSI92FullSQL() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsANSI92IntermediateSQL() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsAlterTableWithAddColumn() throws SQLException {
+ return true;
+ }
+
+ public boolean supportsAlterTableWithDropColumn() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsBatchUpdates() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsCatalogsInDataManipulation() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsCatalogsInIndexDefinitions() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsCatalogsInProcedureCalls() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsCatalogsInTableDefinitions() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsColumnAliasing() throws SQLException {
+ return true;
+ }
+
+ public boolean supportsConvert() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsConvert(int fromType, int toType) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsCoreSQLGrammar() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsCorrelatedSubqueries() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsDataDefinitionAndDataManipulationTransactions()
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsDataManipulationTransactionsOnly() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsDifferentTableCorrelationNames() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsExpressionsInOrderBy() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsExtendedSQLGrammar() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsFullOuterJoins() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsGetGeneratedKeys() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsGroupBy() throws SQLException {
+ return true;
+ }
+
+ public boolean supportsGroupByBeyondSelect() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsGroupByUnrelated() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsIntegrityEnhancementFacility() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsLikeEscapeClause() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsLimitedOuterJoins() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsMinimumSQLGrammar() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsMixedCaseIdentifiers() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsMultipleOpenResults() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsMultipleResultSets() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsMultipleTransactions() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsNamedParameters() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsNonNullableColumns() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsOpenCursorsAcrossCommit() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsOpenCursorsAcrossRollback() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsOpenStatementsAcrossCommit() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsOpenStatementsAcrossRollback() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsOrderByUnrelated() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsOuterJoins() throws SQLException {
+ return true;
+ }
+
+ public boolean supportsPositionedDelete() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsPositionedUpdate() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsResultSetConcurrency(int type, int concurrency)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsResultSetHoldability(int holdability)
+ throws SQLException {
+ return false;
+ }
+
+ public boolean supportsResultSetType(int type) throws SQLException {
+ return true;
+ }
+
+ public boolean supportsSavepoints() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsSchemasInDataManipulation() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsSchemasInIndexDefinitions() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsSchemasInProcedureCalls() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsSchemasInTableDefinitions() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsSelectForUpdate() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsStatementPooling() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsStoredProcedures() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsSubqueriesInComparisons() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsSubqueriesInExists() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsSubqueriesInIns() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsSubqueriesInQuantifieds() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsTableCorrelationNames() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsTransactionIsolationLevel(int level)
+ throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsTransactions() throws SQLException {
+ return false;
+ }
+
+ public boolean supportsUnion() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean supportsUnionAll() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean updatesAreDetected(int type) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean usesLocalFilePerTable() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean usesLocalFiles() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public static void main(String[] args) throws SQLException {
+ HiveDatabaseMetaData meta = new HiveDatabaseMetaData(null, null);
+ System.out.println("DriverName: " + meta.getDriverName());
+ System.out.println("DriverVersion: " + meta.getDriverVersion());
+ }
+}
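Reviewer note: a sketch of how a client might exercise the implemented metadata calls, given an open connection conn from the earlier example (column positions follow the standard JDBC getTables layout):

    import java.sql.DatabaseMetaData;
    import java.sql.ResultSet;

    DatabaseMetaData meta = conn.getMetaData();
    System.out.println(meta.getDatabaseProductName());  // "Hive"
    // A null catalog and "%" patterns mean "do not narrow the search".
    ResultSet tables = meta.getTables(null, "%", "%", new String[] {"TABLE", "VIEW"});
    while (tables.next()) {
      // TABLE_SCHEM is column 2, TABLE_NAME is column 3.
      System.out.println(tables.getString(2) + "." + tables.getString(3));
    }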
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
new file mode 100644
index 0000000..6093070
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
@@ -0,0 +1,297 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.io.IOException;
+import java.net.URL;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverPropertyInfo;
+import java.sql.SQLException;
+import java.util.Properties;
+import java.util.jar.Attributes;
+import java.util.jar.Manifest;
+import java.util.regex.Pattern;
+/**
+ * HiveDriver.
+ *
+ */
+public class HiveDriver implements Driver {
+ static {
+ try {
+ java.sql.DriverManager.registerDriver(new HiveDriver());
+ } catch (SQLException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Is this driver JDBC compliant?
+ */
+ private static final boolean JDBC_COMPLIANT = false;
+
+ /**
+ * The required prefix for the connection URL.
+ */
+ private static final String URL_PREFIX = "jdbc:hive2://";
+
+ /**
+ * If host is provided, without a port.
+ */
+ private static final String DEFAULT_PORT = "10000";
+
+ /**
+ * Property key for the database name.
+ */
+ private static final String DBNAME_PROPERTY_KEY = "DBNAME";
+
+ /**
+ * Property key for the Hive Server2 host.
+ */
+ private static final String HOST_PROPERTY_KEY = "HOST";
+
+ /**
+ * Property key for the Hive Server2 port.
+ */
+ private static final String PORT_PROPERTY_KEY = "PORT";
+
+
+ /**
+ *
+ */
+ public HiveDriver() {
+ SecurityManager security = System.getSecurityManager();
+ if (security != null) {
+ security.checkWrite("foobah");
+ }
+ }
+
+ /**
+ * Checks whether a given url is in a valid format.
+ *
+ * The current URI format is: jdbc:hive2://[host[:port]]
+ *
+ * jdbc:hive2:// - run in embedded mode
+ * jdbc:hive2://localhost - connect to localhost on the default port (10000)
+ * jdbc:hive2://localhost:5050 - connect to localhost on port 5050
+ *
+ * TODO: - write a better regex. - decide on uri format
+ */
+
+ public boolean acceptsURL(String url) throws SQLException {
+ return Pattern.matches(URL_PREFIX + ".*", url);
+ }
+
+ public Connection connect(String url, Properties info) throws SQLException {
+ return new HiveConnection(url, info);
+ }
+
+ /**
+ * Package scoped access to the Driver's Major Version
+ * @return The Major version number of the driver. -1 if it cannot be determined from the
+ * manifest.mf file.
+ */
+ static int getMajorDriverVersion() {
+ int version = -1;
+ try {
+ String fullVersion = HiveDriver.fetchManifestAttribute(
+ Attributes.Name.IMPLEMENTATION_VERSION);
+ String[] tokens = fullVersion.split("\\."); //$NON-NLS-1$
+
+ if(tokens != null && tokens.length > 0 && tokens[0] != null) {
+ version = Integer.parseInt(tokens[0]);
+ }
+ } catch (Exception e) {
+ // Possible reasons to end up here:
+ // - Unable to read version from manifest.mf
+ // - Version string is not in the proper X.x.xxx format
+ version = -1;
+ }
+ return version;
+ }
+
+ /**
+ * Package scoped access to the Driver's Minor Version
+ * @return The Minor version number of the driver. -1 if it cannot be determined from the
+ * manifest.mf file.
+ */
+ static int getMinorDriverVersion() {
+ int version = -1;
+ try {
+ String fullVersion = HiveDriver.fetchManifestAttribute(
+ Attributes.Name.IMPLEMENTATION_VERSION);
+ String[] tokens = fullVersion.split("\\."); //$NON-NLS-1$
+
+ if(tokens != null && tokens.length > 1 && tokens[1] != null) {
+ version = Integer.parseInt(tokens[1]);
+ }
+ } catch (Exception e) {
+ // Possible reasons to end up here:
+ // - Unable to read version from manifest.mf
+ // - Version string is not in the proper X.x.xxx format
+ version = -1;
+ }
+ return version;
+ }
+
+ /**
+ * Returns the major version of this driver.
+ */
+ public int getMajorVersion() {
+ return HiveDriver.getMajorDriverVersion();
+ }
+
+ /**
+ * Returns the minor version of this driver.
+ */
+ public int getMinorVersion() {
+ return HiveDriver.getMinorDriverVersion();
+ }
+
+ public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
+ if (info == null) {
+ info = new Properties();
+ }
+
+ if ((url != null) && url.startsWith(URL_PREFIX)) {
+ info = parseURL(url, info);
+ }
+
+ DriverPropertyInfo hostProp = new DriverPropertyInfo(HOST_PROPERTY_KEY,
+ info.getProperty(HOST_PROPERTY_KEY, ""));
+ hostProp.required = false;
+ hostProp.description = "Hostname of Hive Server2";
+
+ DriverPropertyInfo portProp = new DriverPropertyInfo(PORT_PROPERTY_KEY,
+ info.getProperty(PORT_PROPERTY_KEY, ""));
+ portProp.required = false;
+ portProp.description = "Port number of Hive Server2";
+
+ DriverPropertyInfo dbProp = new DriverPropertyInfo(DBNAME_PROPERTY_KEY,
+ info.getProperty(DBNAME_PROPERTY_KEY, "default"));
+ dbProp.required = false;
+ dbProp.description = "Database name";
+
+ DriverPropertyInfo[] dpi = new DriverPropertyInfo[3];
+
+ dpi[0] = hostProp;
+ dpi[1] = portProp;
+ dpi[2] = dbProp;
+
+ return dpi;
+ }
+
+ /**
+ * Returns whether the driver is JDBC compliant.
+ */
+
+ public boolean jdbcCompliant() {
+ return JDBC_COMPLIANT;
+ }
+
+ /**
+ * Takes a url in the form of jdbc:hive2://[hostname]:[port]/[db_name] and
+ * parses it. Everything after jdbc:hive2:// is optional.
+ *
+ * @param url
+ * @param defaults
+ * @return
+ * @throws java.sql.SQLException
+ */
+ private Properties parseURL(String url, Properties defaults) throws SQLException {
+ Properties urlProps = (defaults != null) ? new Properties(defaults)
+ : new Properties();
+
+ if (url == null || !url.startsWith(URL_PREFIX)) {
+ throw new SQLException("Invalid connection url: " + url);
+ }
+
+ if (url.length() <= URL_PREFIX.length()) {
+ return urlProps;
+ }
+
+ // [hostname]:[port]/[db_name]
+ String connectionInfo = url.substring(URL_PREFIX.length());
+
+ // [hostname]:[port] [db_name]
+ String[] hostPortAndDatabase = connectionInfo.split("/", 2);
+
+ // [hostname]:[port]
+ if (hostPortAndDatabase[0].length() > 0) {
+ String[] hostAndPort = hostPortAndDatabase[0].split(":", 2);
+ urlProps.put(HOST_PROPERTY_KEY, hostAndPort[0]);
+ if (hostAndPort.length > 1) {
+ urlProps.put(PORT_PROPERTY_KEY, hostAndPort[1]);
+ } else {
+ urlProps.put(PORT_PROPERTY_KEY, DEFAULT_PORT);
+ }
+ }
+
+ // [db_name]
+ if (hostPortAndDatabase.length > 1) {
+ urlProps.put(DBNAME_PROPERTY_KEY, hostPortAndDatabase[1]);
+ }
+
+ return urlProps;
+ }
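+ // A sketch of the parse, assuming URL_PREFIX is "jdbc:hive2://" and
+ // DEFAULT_PORT is "10000" (both defined elsewhere in this class):
+ // "jdbc:hive2://host1:10001/mydb" -> {HOST=host1, PORT=10001, DBNAME=mydb}
+ // "jdbc:hive2://host1/mydb"       -> {HOST=host1, PORT=10000, DBNAME=mydb}
+ // "jdbc:hive2://"                 -> {} (everything after the prefix is optional)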
+
+ /**
+ * Lazy-load manifest attributes as needed.
+ */
+ private static Attributes manifestAttributes = null;
+
+ /**
+ * Loads the manifest attributes from the jar.
+ *
+ * @throws java.net.MalformedURLException
+ * @throws IOException
+ */
+ private static synchronized void loadManifestAttributes() throws IOException {
+ if (manifestAttributes != null) {
+ return;
+ }
+ Class<?> clazz = HiveDriver.class;
+ String classContainer = clazz.getProtectionDomain().getCodeSource()
+ .getLocation().toString();
+ URL manifestUrl = new URL("jar:" + classContainer
+ + "!/META-INF/MANIFEST.MF");
+ Manifest manifest = new Manifest(manifestUrl.openStream());
+ manifestAttributes = manifest.getMainAttributes();
+ }
+
+ /**
+ * Package scoped to allow manifest fetching from other HiveDriver classes
+ * Helper to initialize attributes and return one.
+ *
+ * @param attributeName
+ * @return
+ * @throws SQLException
+ */
+ static String fetchManifestAttribute(Attributes.Name attributeName)
+ throws SQLException {
+ try {
+ loadManifestAttributes();
+ } catch (IOException e) {
+ throw new SQLException("Couldn't load manifest attributes.", e);
+ }
+ return manifestAttributes.getValue(attributeName);
+ }
+}
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java
new file mode 100644
index 0000000..d1ac109
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+public abstract class HiveMetaDataResultSet<M> extends HiveBaseResultSet {
+ protected final List<M> data;
+
+ @SuppressWarnings("unchecked")
+ public HiveMetaDataResultSet(final List<String> columnNames
+ , final List<String> columnTypes
+ , final List<M> data) throws SQLException {
+ if (data != null) {
+ this.data = new ArrayList<M>(data);
+ } else {
+ this.data = new ArrayList<M>();
+ }
+ if (columnNames != null) {
+ this.columnNames = new ArrayList<String>(columnNames);
+ } else {
+ this.columnNames = new ArrayList<String>();
+ }
+ if (columnTypes != null) {
+ this.columnTypes = new ArrayList<String>(columnTypes);
+ } else {
+ this.columnTypes = new ArrayList<String>();
+ }
+ }
+
+ @Override
+ public void close() throws SQLException {
+ }
+
+}
diff --git jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
new file mode 100644
index 0000000..0d91340
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
@@ -0,0 +1,1280 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.NClob;
+import java.sql.ParameterMetaData;
+import java.sql.PreparedStatement;
+import java.sql.Ref;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.RowId;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hive.service.cli.thrift.TExecuteStatementReq;
+import org.apache.hive.service.cli.thrift.TExecuteStatementResp;
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TSessionHandle;
+
+/**
+ * HivePreparedStatement.
+ *
+ */
+public class HivePreparedStatement implements PreparedStatement {
+ private final String sql;
+ private TCLIService.Iface client;
+ private final TSessionHandle sessHandle;
+ private TOperationHandle stmtHandle;
+ Map<String, String> sessConf = new HashMap<String, String>();
+
+ /**
+ * save the SQL parameters {paramLoc:paramValue}
+ */
+ private final HashMap<Integer, String> parameters = new HashMap<Integer, String>();
+
+ /**
+ * We need to keep a reference to the result set to support the following:
+ *
+ * statement.execute(String sql);
+ * statement.getResultSet();
+ * .
+ */
+ private ResultSet resultSet = null;
+ /**
+ * The maximum number of rows this statement should return (0 => all rows).
+ */
+ private int maxRows = 0;
+
+ /**
+ * Add SQLWarnings to the warningChain if needed.
+ */
+ private SQLWarning warningChain = null;
+
+ /**
+ * Keep state so we can fail certain calls made after close().
+ */
+ private boolean isClosed = false;
+
+ /**
+ * Keep the current ResultSet update count.
+ */
+ private final int updateCount=0;
+
+ /**
+ * Constructs a prepared statement for the given session, backed by the
+ * supplied Thrift client.
+ */
+ public HivePreparedStatement(TCLIService.Iface client, TSessionHandle sessHandle,
+ String sql) {
+ this.client = client;
+ this.sessHandle = sessHandle;
+ this.sql = sql;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#addBatch()
+ */
+
+ public void addBatch() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#clearParameters()
+ */
+
+ public void clearParameters() throws SQLException {
+ this.parameters.clear();
+ }
+
+ /**
+ * Invokes executeQuery(sql) using the sql provided to the constructor.
+ *
+ * @return boolean Returns true if a resultSet is created, false if not.
+ * Note: true is returned even if the result set is empty.
+ *
+ * @throws SQLException
+ */
+
+ public boolean execute() throws SQLException {
+ ResultSet rs = executeImmediate(sql);
+ return rs != null;
+ }
+
+ /**
+ * Invokes executeQuery(sql) using the sql provided to the constructor.
+ *
+ * @return ResultSet
+ * @throws SQLException
+ */
+
+ public ResultSet executeQuery() throws SQLException {
+ return executeImmediate(sql);
+ }
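+ // Typical use, sketched (connection setup elided; conn is a HiveConnection):
+ // PreparedStatement ps = conn.prepareStatement(
+ //     "SELECT * FROM src WHERE key = ? AND value = ?");
+ // ps.setInt(1, 42);
+ // ps.setString(2, "abc"); // quoted and spliced into the SQL by updateSql()
+ // ResultSet rs = ps.executeQuery();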
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#executeUpdate()
+ */
+
+ public int executeUpdate() throws SQLException {
+ executeImmediate(sql);
+ return updateCount;
+ }
+
+ /**
+ * Executes the SQL statement.
+ *
+ * @param sql The sql, as a string, to execute
+ * @return ResultSet
+ * @throws SQLException if the prepared statement is closed or there is a database error.
+ * Caught Exceptions are rethrown as SQLExceptions with SQLState
+ * "08S01".
+ */
+
+ protected ResultSet executeImmediate(String sql) throws SQLException {
+ if (isClosed) {
+ throw new SQLException("Can't execute after statement has been closed");
+ }
+
+ try {
+ clearWarnings();
+ resultSet = null;
+ if (sql.contains("?")) {
+ sql = updateSql(sql, parameters);
+ }
+ TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, sql);
+ execReq.setConfOverlay(sessConf);
+ TExecuteStatementResp execResp = client.ExecuteStatement(execReq);
+ Utils.verifySuccessWithInfo(execResp.getStatus());
+ stmtHandle = execResp.getOperationHandle();
+ } catch (SQLException es) {
+ throw es;
+ } catch (Exception ex) {
+ throw new SQLException(ex.toString(), "08S01", ex);
+ }
+ resultSet = new HiveQueryResultSet.Builder().setClient(client).setSessionHandle(sessHandle)
+ .setStmtHandle(stmtHandle).setMaxRows(maxRows)
+ .build();
+ return resultSet;
+ }
+
+ /**
+ * update the SQL string with parameters set by setXXX methods of {@link PreparedStatement}
+ *
+ * @param sql
+ * @param parameters
+ * @return updated SQL string
+ */
+ private String updateSql(final String sql, HashMap<Integer, String> parameters) {
+
+ StringBuffer newSql = new StringBuffer(sql);
+
+ int paramLoc = 1;
+ while (getCharIndexFromSqlByParamLocation(sql, '?', paramLoc) > 0) {
+ // check that the user has set this parameter
+ if (parameters.containsKey(paramLoc)) {
+ int tt = getCharIndexFromSqlByParamLocation(newSql.toString(), '?', 1);
+ newSql.deleteCharAt(tt);
+ newSql.insert(tt, parameters.get(paramLoc));
+ }
+ paramLoc++;
+ }
+
+ return newSql.toString();
+
+ }
+
+ /**
+ * Get the index of the given char in the SQL string, by parameter location.
+ * Returns -1 if the char is not found.
+ *
+ * @param sql
+ * @param cchar
+ * @param paramLoc
+ * @return
+ */
+ private int getCharIndexFromSqlByParamLocation(final String sql, final char cchar, final int paramLoc) {
+ int signalCount = 0;
+ int charIndex = -1;
+ int num = 0;
+ for (int i = 0; i < sql.length(); i++) {
+ char c = sql.charAt(i);
+ if (c == '\'' || c == '\\') { // count quote and escape characters
+ signalCount++;
+ } else if (c == cchar && signalCount % 2 == 0) {
+ // only treat the char as a parameter marker outside string literals
+ num++;
+ if (num == paramLoc) {
+ charIndex = i;
+ break;
+ }
+ }
+ }
+ return charIndex;
+ }
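+ // Worked example of the substitution above: with
+ // sql = "SELECT * FROM t WHERE name = ? AND id = ?" and
+ // parameters = {1="'bob'", 2="7"}, updateSql() produces
+ // "SELECT * FROM t WHERE name = 'bob' AND id = 7". A '?' inside a quoted
+ // literal is skipped because signalCount is odd at that point.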
+
+
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#getMetaData()
+ */
+
+ public ResultSetMetaData getMetaData() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#getParameterMetaData()
+ */
+
+ public ParameterMetaData getParameterMetaData() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setArray(int, java.sql.Array)
+ */
+
+ public void setArray(int i, Array x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setAsciiStream(int, java.io.InputStream)
+ */
+
+ public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setAsciiStream(int, java.io.InputStream,
+ * int)
+ */
+
+ public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setAsciiStream(int, java.io.InputStream,
+ * long)
+ */
+
+ public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBigDecimal(int, java.math.BigDecimal)
+ */
+
+ public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBinaryStream(int, java.io.InputStream)
+ */
+
+ public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBinaryStream(int, java.io.InputStream,
+ * int)
+ */
+
+ public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBinaryStream(int, java.io.InputStream,
+ * long)
+ */
+
+ public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBlob(int, java.sql.Blob)
+ */
+
+ public void setBlob(int i, Blob x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBlob(int, java.io.InputStream)
+ */
+
+ public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBlob(int, java.io.InputStream, long)
+ */
+
+ public void setBlob(int parameterIndex, InputStream inputStream, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBoolean(int, boolean)
+ */
+
+ public void setBoolean(int parameterIndex, boolean x) throws SQLException {
+ this.parameters.put(parameterIndex, ""+x);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setByte(int, byte)
+ */
+
+ public void setByte(int parameterIndex, byte x) throws SQLException {
+ this.parameters.put(parameterIndex, ""+x);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setBytes(int, byte[])
+ */
+
+ public void setBytes(int parameterIndex, byte[] x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setCharacterStream(int, java.io.Reader)
+ */
+
+ public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setCharacterStream(int, java.io.Reader,
+ * int)
+ */
+
+ public void setCharacterStream(int parameterIndex, Reader reader, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setCharacterStream(int, java.io.Reader,
+ * long)
+ */
+
+ public void setCharacterStream(int parameterIndex, Reader reader, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setClob(int, java.sql.Clob)
+ */
+
+ public void setClob(int i, Clob x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setClob(int, java.io.Reader)
+ */
+
+ public void setClob(int parameterIndex, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setClob(int, java.io.Reader, long)
+ */
+
+ public void setClob(int parameterIndex, Reader reader, long length) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setDate(int, java.sql.Date)
+ */
+
+ public void setDate(int parameterIndex, Date x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setDate(int, java.sql.Date,
+ * java.util.Calendar)
+ */
+
+ public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setDouble(int, double)
+ */
+
+ public void setDouble(int parameterIndex, double x) throws SQLException {
+ this.parameters.put(parameterIndex,""+x);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setFloat(int, float)
+ */
+
+ public void setFloat(int parameterIndex, float x) throws SQLException {
+ this.parameters.put(parameterIndex,""+x);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setInt(int, int)
+ */
+
+ public void setInt(int parameterIndex, int x) throws SQLException {
+ this.parameters.put(parameterIndex,""+x);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setLong(int, long)
+ */
+
+ public void setLong(int parameterIndex, long x) throws SQLException {
+ this.parameters.put(parameterIndex,""+x);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNCharacterStream(int, java.io.Reader)
+ */
+
+ public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNCharacterStream(int, java.io.Reader,
+ * long)
+ */
+
+ public void setNCharacterStream(int parameterIndex, Reader value, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNClob(int, java.sql.NClob)
+ */
+
+ public void setNClob(int parameterIndex, NClob value) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNClob(int, java.io.Reader)
+ */
+
+ public void setNClob(int parameterIndex, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNClob(int, java.io.Reader, long)
+ */
+
+ public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNString(int, java.lang.String)
+ */
+
+ public void setNString(int parameterIndex, String value) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNull(int, int)
+ */
+
+ public void setNull(int parameterIndex, int sqlType) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setNull(int, int, java.lang.String)
+ */
+
+ public void setNull(int paramIndex, int sqlType, String typeName) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setObject(int, java.lang.Object)
+ */
+
+ public void setObject(int parameterIndex, Object x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setObject(int, java.lang.Object, int)
+ */
+
+ public void setObject(int parameterIndex, Object x, int targetSqlType)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setObject(int, java.lang.Object, int, int)
+ */
+
+ public void setObject(int parameterIndex, Object x, int targetSqlType, int scale)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setRef(int, java.sql.Ref)
+ */
+
+ public void setRef(int i, Ref x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setRowId(int, java.sql.RowId)
+ */
+
+ public void setRowId(int parameterIndex, RowId x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setSQLXML(int, java.sql.SQLXML)
+ */
+
+ public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setShort(int, short)
+ */
+
+ public void setShort(int parameterIndex, short x) throws SQLException {
+ this.parameters.put(parameterIndex,""+x);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setString(int, java.lang.String)
+ */
+
+ public void setString(int parameterIndex, String x) throws SQLException {
+ x = x.replace("'", "\\'");
+ this.parameters.put(parameterIndex, "'" + x + "'");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setTime(int, java.sql.Time)
+ */
+
+ public void setTime(int parameterIndex, Time x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setTime(int, java.sql.Time,
+ * java.util.Calendar)
+ */
+
+ public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setTimestamp(int, java.sql.Timestamp)
+ */
+
+ public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException {
+ this.parameters.put(parameterIndex, x.toString());
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setTimestamp(int, java.sql.Timestamp,
+ * java.util.Calendar)
+ */
+
+ public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setURL(int, java.net.URL)
+ */
+
+ public void setURL(int parameterIndex, URL x) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.PreparedStatement#setUnicodeStream(int, java.io.InputStream,
+ * int)
+ */
+
+ public void setUnicodeStream(int parameterIndex, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#addBatch(java.lang.String)
+ */
+
+ public void addBatch(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#cancel()
+ */
+
+ public void cancel() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#clearBatch()
+ */
+
+ public void clearBatch() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#clearWarnings()
+ */
+
+ public void clearWarnings() throws SQLException {
+ warningChain=null;
+ }
+
+ /**
+ * Closes the prepared statement.
+ *
+ * @throws SQLException
+ */
+
+ public void close() throws SQLException {
+ client = null;
+ if (resultSet!=null) {
+ resultSet.close();
+ resultSet = null;
+ }
+ isClosed = true;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String)
+ */
+
+ public boolean execute(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, int)
+ */
+
+ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, int[])
+ */
+
+ public boolean execute(String sql, int[] columnIndexes) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, java.lang.String[])
+ */
+
+ public boolean execute(String sql, String[] columnNames) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeBatch()
+ */
+
+ public int[] executeBatch() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeQuery(java.lang.String)
+ */
+
+ public ResultSet executeQuery(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String)
+ */
+
+ public int executeUpdate(String sql) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, int)
+ */
+
+ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, int[])
+ */
+
+ public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, java.lang.String[])
+ */
+
+ public int executeUpdate(String sql, String[] columnNames) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getConnection()
+ */
+
+ public Connection getConnection() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getFetchDirection()
+ */
+
+ public int getFetchDirection() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getFetchSize()
+ */
+
+ public int getFetchSize() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getGeneratedKeys()
+ */
+
+ public ResultSet getGeneratedKeys() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMaxFieldSize()
+ */
+
+ public int getMaxFieldSize() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMaxRows()
+ */
+
+ public int getMaxRows() throws SQLException {
+ return this.maxRows;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMoreResults()
+ */
+
+ public boolean getMoreResults() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMoreResults(int)
+ */
+
+ public boolean getMoreResults(int current) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getQueryTimeout()
+ */
+
+ public int getQueryTimeout() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSet()
+ */
+
+ public ResultSet getResultSet() throws SQLException {
+ return this.resultSet;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetConcurrency()
+ */
+
+ public int getResultSetConcurrency() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetHoldability()
+ */
+
+ public int getResultSetHoldability() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetType()
+ */
+
+ public int getResultSetType() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getUpdateCount()
+ */
+
+ public int getUpdateCount() throws SQLException {
+ return updateCount;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getWarnings()
+ */
+
+ public SQLWarning getWarnings() throws SQLException {
+ return warningChain;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#isClosed()
+ */
+
+ public boolean isClosed() throws SQLException {
+ return isClosed;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#isPoolable()
+ */
+
+ public boolean isPoolable() throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setCursorName(java.lang.String)
+ */
+
+ public void setCursorName(String name) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setEscapeProcessing(boolean)
+ */
+
+ public void setEscapeProcessing(boolean enable) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setFetchDirection(int)
+ */
+
+ public void setFetchDirection(int direction) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setFetchSize(int)
+ */
+
+ public void setFetchSize(int rows) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setMaxFieldSize(int)
+ */
+
+ public void setMaxFieldSize(int max) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setMaxRows(int)
+ */
+
+ public void setMaxRows(int max) throws SQLException {
+ if (max < 0) {
+ throw new SQLException("max must be >= 0");
+ }
+ this.maxRows = max;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setPoolable(boolean)
+ */
+
+ public void setPoolable(boolean poolable) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setQueryTimeout(int)
+ */
+
+ public void setQueryTimeout(int seconds) throws SQLException {
+ // TODO Auto-generated method stub
+ // throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#isWrapperFor(java.lang.Class)
+ */
+
+ public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#unwrap(java.lang.Class)
+ */
+
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ // TODO Auto-generated method stub
+ throw new SQLException("Method not supported");
+ }
+
+}
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
new file mode 100644
index 0000000..7ac2fac
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
@@ -0,0 +1,277 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import static org.apache.hive.service.cli.thrift.TCLIServiceConstants.TYPE_NAMES;
+
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TColumnDesc;
+import org.apache.hive.service.cli.thrift.TFetchOrientation;
+import org.apache.hive.service.cli.thrift.TFetchResultsReq;
+import org.apache.hive.service.cli.thrift.TFetchResultsResp;
+import org.apache.hive.service.cli.thrift.TGetResultSetMetadataReq;
+import org.apache.hive.service.cli.thrift.TGetResultSetMetadataResp;
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+import org.apache.hive.service.cli.thrift.TRow;
+import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.hive.service.cli.thrift.TTableSchema;
+
+/**
+ * HiveQueryResultSet.
+ *
+ */
+public class HiveQueryResultSet extends HiveBaseResultSet {
+
+ public static final Log LOG = LogFactory.getLog(HiveQueryResultSet.class);
+
+ private TCLIService.Iface client;
+ private TOperationHandle stmtHandle;
+ private TSessionHandle sessHandle;
+ private int maxRows;
+ private int fetchSize;
+ private int rowsFetched = 0;
+
+ private List<TRow> fetchedRows;
+ private Iterator<TRow> fetchedRowsItr;
+ private boolean isClosed = false;
+ private boolean emptyResultSet = false;
+
+ public static class Builder {
+
+ private TCLIService.Iface client = null;
+ private TOperationHandle stmtHandle = null;
+ private TSessionHandle sessHandle = null;
+
+ /**
+ * Sets the limit for the maximum number of rows that any ResultSet object produced by this
+ * Statement can contain to the given number. If the limit is exceeded, the excess rows
+ * are silently dropped. The value must be >= 0, and 0 means there is no limit.
+ */
+ private int maxRows = 0;
+ private boolean retrieveSchema = true;
+ private List<String> colNames;
+ private List<String> colTypes;
+ private int fetchSize = 50;
+ private boolean emptyResultSet = false;
+
+ public Builder setClient(TCLIService.Iface client) {
+ this.client = client;
+ return this;
+ }
+
+ public Builder setStmtHandle(TOperationHandle stmtHandle) {
+ this.stmtHandle = stmtHandle;
+ return this;
+ }
+
+ public Builder setSessionHandle(TSessionHandle sessHandle) {
+ this.sessHandle = sessHandle;
+ return this;
+ }
+
+ public Builder setMaxRows(int maxRows) {
+ this.maxRows = maxRows;
+ return this;
+ }
+
+ public Builder setSchema(List<String> colNames, List<String> colTypes) {
+ this.colNames = new ArrayList<String>();
+ this.colNames.addAll(colNames);
+ this.colTypes = new ArrayList<String>();
+ this.colTypes.addAll(colTypes);
+ this.retrieveSchema = false;
+ return this;
+ }
+
+ public Builder setFetchSize(int fetchSize) {
+ this.fetchSize = fetchSize;
+ return this;
+ }
+
+ public Builder setEmptyResultSet(boolean emptyResultSet) {
+ this.emptyResultSet = emptyResultSet;
+ return this;
+ }
+
+ public HiveQueryResultSet build() throws SQLException {
+ return new HiveQueryResultSet(this);
+ }
+ }
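+ // The statement classes in this patch build instances like so:
+ // HiveQueryResultSet rs = new HiveQueryResultSet.Builder()
+ //     .setClient(client).setSessionHandle(sessHandle)
+ //     .setStmtHandle(stmtHandle).setMaxRows(maxRows).setFetchSize(fetchSize)
+ //     .build();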
+
+ protected HiveQueryResultSet(Builder builder) throws SQLException {
+ this.client = builder.client;
+ this.stmtHandle = builder.stmtHandle;
+ this.sessHandle = builder.sessHandle;
+ this.fetchSize = builder.fetchSize;
+ columnNames = new ArrayList<String>();
+ columnTypes = new ArrayList<String>();
+ if (builder.retrieveSchema) {
+ retrieveSchema();
+ } else {
+ this.columnNames.addAll(builder.colNames);
+ this.columnTypes.addAll(builder.colTypes);
+ }
+ this.emptyResultSet = builder.emptyResultSet;
+ if (builder.emptyResultSet) {
+ this.maxRows = 0;
+ } else {
+ this.maxRows = builder.maxRows;
+ }
+ }
+
+ /**
+ * Retrieve schema from the server
+ */
+ private void retrieveSchema() throws SQLException {
+ try {
+ TGetResultSetMetadataReq metadataReq = new TGetResultSetMetadataReq(stmtHandle);
+ // TODO need session handle
+ TGetResultSetMetadataResp metadataResp = client.GetResultSetMetadata(metadataReq);
+ Utils.verifySuccess(metadataResp.getStatus());
+
+ TTableSchema schema = metadataResp.getSchema();
+ if (schema == null || !schema.isSetColumns()) {
+ // TODO: should probably throw an exception here.
+ return;
+ }
+ setSchema(new TableSchema(schema));
+
+ List<TColumnDesc> columns = schema.getColumns();
+ for (int pos = 0; pos < schema.getColumnsSize(); pos++) {
+ String columnName = columns.get(pos).getColumnName();
+ columnNames.add(columnName);
+ String columnTypeName = TYPE_NAMES.get(
+ columns.get(pos).getTypeDesc().getTypes().get(0).getPrimitiveEntry().getType());
+ columnTypes.add(columnTypeName);
+ }
+ } catch (SQLException eS) {
+ throw eS; // rethrow the SQLException as is
+ } catch (Exception ex) {
+ ex.printStackTrace();
+ throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Set the specified schema to the resultset
+ * @param colNames
+ * @param colTypes
+ */
+ public void setSchema(List<String> colNames, List<String> colTypes) {
+ columnNames.addAll(colNames);
+ columnTypes.addAll(colTypes);
+ }
+
+ @Override
+ public void close() throws SQLException {
+ // Release the client and handles; state must be reset if the result set is reopened.
+ client = null;
+ stmtHandle = null;
+ sessHandle = null;
+ isClosed = true;
+ }
+
+ /**
+ * Moves the cursor down one row from its current position.
+ *
+ * @see java.sql.ResultSet#next()
+ * @throws SQLException
+ * if a database access error occurs.
+ */
+ public boolean next() throws SQLException {
+ if (isClosed) {
+ throw new SQLException("Resultset is closed");
+ }
+ if (emptyResultSet || (maxRows > 0 && rowsFetched >= maxRows)) {
+ return false;
+ }
+
+ try {
+ if (fetchedRows == null || !fetchedRowsItr.hasNext()) {
+ TFetchResultsReq fetchReq = new TFetchResultsReq(stmtHandle,
+ TFetchOrientation.FETCH_NEXT, fetchSize);
+ TFetchResultsResp fetchResp = client.FetchResults(fetchReq);
+ Utils.verifySuccessWithInfo(fetchResp.getStatus());
+ fetchedRows = fetchResp.getResults().getRows();
+ fetchedRowsItr = fetchedRows.iterator();
+ }
+
+ String rowStr = "";
+ if (fetchedRowsItr.hasNext()) {
+ row = fetchedRowsItr.next();
+ } else {
+ return false;
+ }
+
+ rowsFetched++;
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Fetched row string: " + rowStr);
+ }
+
+ } catch (SQLException eS) {
+ throw eS;
+ } catch (Exception ex) {
+ ex.printStackTrace();
+ throw new SQLException("Error retrieving next row", ex);
+ }
+ // NOTE: fetchOne doesn't throw new SQLException("Method not supported").
+ return true;
+ }
+
+ @Override
+ public ResultSetMetaData getMetaData() throws SQLException {
+ if (isClosed) {
+ throw new SQLException("Resultset is closed");
+ }
+ return super.getMetaData();
+ }
+
+ @Override
+ public void setFetchSize(int rows) throws SQLException {
+ if (isClosed) {
+ throw new SQLException("Resultset is closed");
+ }
+ fetchSize = rows;
+ }
+
+ @Override
+ public int getFetchSize() throws SQLException {
+ if (isClosed) {
+ throw new SQLException("Resultset is closed");
+ }
+ return fetchSize;
+ }
+
+}
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
new file mode 100644
index 0000000..1eb18b9
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde.serdeConstants;
+
+/**
+ * HiveResultSetMetaData.
+ *
+ */
+public class HiveResultSetMetaData implements java.sql.ResultSetMetaData {
+ private final List<String> columnNames;
+ private final List<String> columnTypes;
+
+ public HiveResultSetMetaData(List<String> columnNames,
+ List<String> columnTypes) {
+ this.columnNames = columnNames;
+ this.columnTypes = columnTypes;
+ }
+
+ public String getCatalogName(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getColumnClassName(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int getColumnCount() throws SQLException {
+ return columnNames.size();
+ }
+
+ public int getColumnDisplaySize(int column) throws SQLException {
+ int columnType = getColumnType(column);
+
+ return JdbcColumn.columnDisplaySize(columnType);
+ }
+
+ public String getColumnLabel(int column) throws SQLException {
+ return columnNames.get(column - 1);
+ }
+
+ public String getColumnName(int column) throws SQLException {
+ return columnNames.get(column - 1);
+ }
+
+ public int getColumnType(int column) throws SQLException {
+ if (columnTypes == null) {
+ throw new SQLException(
+ "Could not determine column type name for ResultSet");
+ }
+
+ if (column < 1 || column > columnTypes.size()) {
+ throw new SQLException("Invalid column value: " + column);
+ }
+
+ // we need to convert the Hive type to the SQL type
+ String type = columnTypes.get(column - 1);
+
+ return Utils.hiveTypeToSqlType(type);
+ }
+
+ public String getColumnTypeName(int column) throws SQLException {
+ if (columnTypes == null) {
+ throw new SQLException(
+ "Could not determine column type name for ResultSet");
+ }
+
+ if (column < 1 || column > columnTypes.size()) {
+ throw new SQLException("Invalid column value: " + column);
+ }
+
+ // we need to convert the Hive type to the SQL type name
+ // TODO: this would be better handled in an enum
+ String type = columnTypes.get(column - 1);
+ if ("string".equalsIgnoreCase(type)) {
+ return serdeConstants.STRING_TYPE_NAME;
+ } else if ("float".equalsIgnoreCase(type)) {
+ return serdeConstants.FLOAT_TYPE_NAME;
+ } else if ("double".equalsIgnoreCase(type)) {
+ return serdeConstants.DOUBLE_TYPE_NAME;
+ } else if ("boolean".equalsIgnoreCase(type)) {
+ return serdeConstants.BOOLEAN_TYPE_NAME;
+ } else if ("tinyint".equalsIgnoreCase(type)) {
+ return serdeConstants.TINYINT_TYPE_NAME;
+ } else if ("smallint".equalsIgnoreCase(type)) {
+ return serdeConstants.SMALLINT_TYPE_NAME;
+ } else if ("int".equalsIgnoreCase(type)) {
+ return serdeConstants.INT_TYPE_NAME;
+ } else if ("bigint".equalsIgnoreCase(type)) {
+ return serdeConstants.BIGINT_TYPE_NAME;
+ } else if ("timestamp".equalsIgnoreCase(type)) {
+ return serdeConstants.TIMESTAMP_TYPE_NAME;
+ } else if ("decimal".equalsIgnoreCase(type)) {
+ return serdeConstants.DECIMAL_TYPE_NAME;
+ } else if (type.startsWith("map<")) {
+ return serdeConstants.STRING_TYPE_NAME;
+ } else if (type.startsWith("array<")) {
+ return serdeConstants.STRING_TYPE_NAME;
+ } else if (type.startsWith("struct<")) {
+ return serdeConstants.STRING_TYPE_NAME;
+ }
+
+ throw new SQLException("Unrecognized column type: " + type);
+ }
+
+ public int getPrecision(int column) throws SQLException {
+ int columnType = getColumnType(column);
+
+ return JdbcColumn.columnPrecision(columnType);
+ }
+
+ public int getScale(int column) throws SQLException {
+ int columnType = getColumnType(column);
+
+ return JdbcColumn.columnScale(columnType);
+ }
+
+ public String getSchemaName(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public String getTableName(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isAutoIncrement(int column) throws SQLException {
+ // Hive doesn't have an auto-increment concept
+ return false;
+ }
+
+ public boolean isCaseSensitive(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isCurrency(int column) throws SQLException {
+ // Hive doesn't support a currency type
+ return false;
+ }
+
+ public boolean isDefinitelyWritable(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public int isNullable(int column) throws SQLException {
+ // Hive doesn't have the concept of not-null
+ return ResultSetMetaData.columnNullable;
+ }
+
+ public boolean isReadOnly(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isSearchable(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isSigned(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isWritable(int column) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+}
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
new file mode 100644
index 0000000..aeae800
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -0,0 +1,566 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCancelOperationReq;
+import org.apache.hive.service.cli.thrift.TCancelOperationResp;
+import org.apache.hive.service.cli.thrift.TCloseOperationReq;
+import org.apache.hive.service.cli.thrift.TCloseOperationResp;
+import org.apache.hive.service.cli.thrift.TExecuteStatementReq;
+import org.apache.hive.service.cli.thrift.TExecuteStatementResp;
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+import org.apache.hive.service.cli.thrift.TSessionHandle;
+
+/**
+ * HiveStatement.
+ *
+ */
+public class HiveStatement implements java.sql.Statement {
+ private TCLIService.Iface client;
+ private TOperationHandle stmtHandle;
+ private final TSessionHandle sessHandle;
+ Map<String, String> sessConf = new HashMap<String, String>();
+ private int fetchSize = 50;
+ /**
+ * We need to keep a reference to the result set to support the following:
+ *
+ * statement.execute(String sql);
+ * statement.getResultSet();
+ * .
+ */
+ private ResultSet resultSet = null;
+
+ /**
+ * Sets the limit for the maximum number of rows that any ResultSet object produced by this
+ * Statement can contain to the given number. If the limit is exceeded, the excess rows
+ * are silently dropped. The value must be >= 0, and 0 means there is no limit.
+ */
+ private int maxRows = 0;
+
+ /**
+ * Add SQLWarnings to the warningChain if needed.
+ */
+ private SQLWarning warningChain = null;
+
+ /**
+ * Keep state so we can fail certain calls made after close().
+ */
+ private boolean isClosed = false;
+
+ /**
+ * Constructs a statement for the given session, backed by the supplied
+ * Thrift client.
+ */
+ public HiveStatement(TCLIService.Iface client, TSessionHandle sessHandle) {
+ this.client = client;
+ this.sessHandle = sessHandle;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#addBatch(java.lang.String)
+ */
+
+ public void addBatch(String sql) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#cancel()
+ */
+
+ public void cancel() throws SQLException {
+ if (isClosed) {
+ throw new SQLException("Can't cancel after statement has been closed");
+ }
+
+ TCancelOperationReq cancelReq = new TCancelOperationReq();
+ cancelReq.setOperationHandle(stmtHandle);
+ try {
+ TCancelOperationResp cancelResp = client.CancelOperation(cancelReq);
+ Utils.verifySuccessWithInfo(cancelResp.getStatus());
+ } catch (SQLException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new SQLException(e.toString(), "08S01");
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#clearBatch()
+ */
+
+ public void clearBatch() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#clearWarnings()
+ */
+
+ public void clearWarnings() throws SQLException {
+ warningChain = null;
+ }
+
+ private void closeClientOperation() throws SQLException {
+ try {
+ if (stmtHandle != null) {
+ TCloseOperationReq closeReq = new TCloseOperationReq();
+ closeReq.setOperationHandle(stmtHandle);
+ TCloseOperationResp closeResp = client.CloseOperation(closeReq);
+ Utils.verifySuccessWithInfo(closeResp.getStatus());
+ }
+ } catch (SQLException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new SQLException(e.toString(), "08S01");
+ }
+ stmtHandle = null;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#close()
+ */
+
+ public void close() throws SQLException {
+ if (isClosed) {
+ return;
+ }
+ closeClientOperation();
+ client = null;
+ resultSet = null;
+ isClosed = true;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String)
+ */
+
+ public boolean execute(String sql) throws SQLException {
+ if (isClosed) {
+ throw new SQLException("Can't execute after statement has been closed");
+ }
+
+ try {
+ closeClientOperation();
+ TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, sql);
+ execReq.setConfOverlay(sessConf);
+ TExecuteStatementResp execResp = client.ExecuteStatement(execReq);
+ Utils.verifySuccessWithInfo(execResp.getStatus());
+ stmtHandle = execResp.getOperationHandle();
+ } catch (SQLException eS) {
+ throw eS;
+ } catch (Exception ex) {
+ throw new SQLException(ex.toString(), "08S01");
+ }
+
+ if (!stmtHandle.isHasResultSet()) {
+ return false;
+ }
+ resultSet = new HiveQueryResultSet.Builder().setClient(client).setSessionHandle(sessHandle)
+ .setStmtHandle(stmtHandle).setMaxRows(maxRows).setFetchSize(fetchSize)
+ .build();
+ return true;
+ }
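+ // The pattern from the class comment, sketched:
+ // if (stmt.execute("SELECT * FROM src")) {
+ //   ResultSet rs = stmt.getResultSet();
+ //   while (rs.next()) { /* read columns */ }
+ // }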
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, int)
+ */
+
+ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, int[])
+ */
+
+ public boolean execute(String sql, int[] columnIndexes) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#execute(java.lang.String, java.lang.String[])
+ */
+
+ public boolean execute(String sql, String[] columnNames) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeBatch()
+ */
+
+ public int[] executeBatch() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeQuery(java.lang.String)
+ */
+
+ public ResultSet executeQuery(String sql) throws SQLException {
+ if (!execute(sql)) {
+ throw new SQLException("The query did not generate a result set!");
+ }
+ return resultSet;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String)
+ */
+
+ public int executeUpdate(String sql) throws SQLException {
+ execute(sql);
+ return 0;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, int)
+ */
+
+ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, int[])
+ */
+
+ public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#executeUpdate(java.lang.String, java.lang.String[])
+ */
+
+ public int executeUpdate(String sql, String[] columnNames) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getConnection()
+ */
+
+ public Connection getConnection() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getFetchDirection()
+ */
+
+ public int getFetchDirection() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getFetchSize()
+ */
+
+ public int getFetchSize() throws SQLException {
+ return fetchSize;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getGeneratedKeys()
+ */
+
+ public ResultSet getGeneratedKeys() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMaxFieldSize()
+ */
+
+ public int getMaxFieldSize() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMaxRows()
+ */
+
+ public int getMaxRows() throws SQLException {
+ return maxRows;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMoreResults()
+ */
+
+ public boolean getMoreResults() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getMoreResults(int)
+ */
+
+ public boolean getMoreResults(int current) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getQueryTimeout()
+ */
+
+ public int getQueryTimeout() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSet()
+ */
+
+ public ResultSet getResultSet() throws SQLException {
+ return resultSet;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetConcurrency()
+ */
+
+ public int getResultSetConcurrency() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetHoldability()
+ */
+
+ public int getResultSetHoldability() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getResultSetType()
+ */
+
+ public int getResultSetType() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getUpdateCount()
+ */
+
+ public int getUpdateCount() throws SQLException {
+ return 0;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#getWarnings()
+ */
+
+ public SQLWarning getWarnings() throws SQLException {
+ return warningChain;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#isClosed()
+ */
+
+ public boolean isClosed() throws SQLException {
+ return isClosed;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#isPoolable()
+ */
+
+ public boolean isPoolable() throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setCursorName(java.lang.String)
+ */
+
+ public void setCursorName(String name) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setEscapeProcessing(boolean)
+ */
+
+ public void setEscapeProcessing(boolean enable) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setFetchDirection(int)
+ */
+
+ public void setFetchDirection(int direction) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setFetchSize(int)
+ */
+
+ public void setFetchSize(int rows) throws SQLException {
+ fetchSize = rows;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setMaxFieldSize(int)
+ */
+
+ public void setMaxFieldSize(int max) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setMaxRows(int)
+ */
+
+ public void setMaxRows(int max) throws SQLException {
+ if (max < 0) {
+ throw new SQLException("max must be >= 0");
+ }
+ maxRows = max;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setPoolable(boolean)
+ */
+
+ public void setPoolable(boolean poolable) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Statement#setQueryTimeout(int)
+ */
+
+ public void setQueryTimeout(int seconds) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#isWrapperFor(java.lang.Class)
+ */
+
+ public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.sql.Wrapper#unwrap(java.lang.Class)
+ */
+
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ throw new SQLException("Method not supported");
+ }
+
+}
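
For reference, a minimal sketch of driving the statement above through the plain java.sql API; the endpoint, credentials, and table name are illustrative assumptions, not part of the patch:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class StatementSketch {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    // hypothetical HiveServer2 endpoint; "jdbc:hive2://" alone would run embedded
    Connection con = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    // execute() returns true only when the server produced a result set;
    // executeQuery() would instead throw if the statement yields none.
    if (stmt.execute("select * from src")) { // hypothetical table
      ResultSet rs = stmt.getResultSet();
      while (rs.next()) {
        System.out.println(rs.getString(1));
      }
    }
    stmt.close(); // also closes the server-side operation handle
    con.close();
  }
}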
diff --git jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
new file mode 100644
index 0000000..25c3f3f
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.sql.SQLException;
+import java.sql.Types;
+
+
+/**
+ * Column metadata.
+ */
+public class JdbcColumn {
+ private final String columnName;
+ private final String tableName;
+ private final String tableCatalog;
+ private final String type;
+ private final String comment;
+ private final int ordinalPos;
+
+ JdbcColumn(String columnName, String tableName, String tableCatalog
+ , String type, String comment, int ordinalPos) {
+ this.columnName = columnName;
+ this.tableName = tableName;
+ this.tableCatalog = tableCatalog;
+ this.type = type;
+ this.comment = comment;
+ this.ordinalPos = ordinalPos;
+ }
+
+ public String getColumnName() {
+ return columnName;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ public String getTableCatalog() {
+ return tableCatalog;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public Integer getSqlType() throws SQLException {
+ return Utils.hiveTypeToSqlType(type);
+ }
+
+ static int columnDisplaySize(int columnType) throws SQLException {
+ // according to hiveTypeToSqlType possible options are:
+ switch(columnType) {
+ case Types.BOOLEAN:
+ return columnPrecision(columnType);
+ case Types.VARCHAR:
+ return Integer.MAX_VALUE; // hive has no max limit for strings
+ case Types.TINYINT:
+ case Types.SMALLINT:
+ case Types.INTEGER:
+ case Types.BIGINT:
+ return columnPrecision(columnType) + 1; // allow +/-
+ case Types.TIMESTAMP:
+ return columnPrecision(columnType);
+
+ // see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
+ case Types.FLOAT:
+ return 24; // e.g. -(17#).e-###
+ // see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Double.MAX_EXPONENT
+ case Types.DOUBLE:
+ return 25; // e.g. -(17#).e-####
+ case Types.DECIMAL:
+ return Integer.MAX_VALUE;
+ default:
+ throw new SQLException("Invalid column type: " + columnType);
+ }
+ }
+
+ static int columnPrecision(int columnType) throws SQLException {
+ // according to hiveTypeToSqlType possible options are:
+ switch(columnType) {
+ case Types.BOOLEAN:
+ return 1;
+ case Types.VARCHAR:
+ return Integer.MAX_VALUE; // hive has no max limit for strings
+ case Types.TINYINT:
+ return 3;
+ case Types.SMALLINT:
+ return 5;
+ case Types.INTEGER:
+ return 10;
+ case Types.BIGINT:
+ return 19;
+ case Types.FLOAT:
+ return 7;
+ case Types.DOUBLE:
+ return 15;
+ case Types.TIMESTAMP:
+ return 29;
+ case Types.DECIMAL:
+ return Integer.MAX_VALUE;
+ default:
+ throw new SQLException("Invalid column type: " + columnType);
+ }
+ }
+
+ static int columnScale(int columnType) throws SQLException {
+ // according to hiveTypeToSqlType possible options are:
+ switch(columnType) {
+ case Types.BOOLEAN:
+ case Types.VARCHAR:
+ case Types.TINYINT:
+ case Types.SMALLINT:
+ case Types.INTEGER:
+ case Types.BIGINT:
+ return 0;
+ case Types.FLOAT:
+ return 7;
+ case Types.DOUBLE:
+ return 15;
+ case Types.TIMESTAMP:
+ return 9;
+ case Types.DECIMAL:
+ return Integer.MAX_VALUE;
+ default:
+ throw new SQLException("Invalid column type: " + columnType);
+ }
+ }
+
+ public Integer getColumnSize() throws SQLException {
+ int precision = columnPrecision(Utils.hiveTypeToSqlType(type));
+
+ return precision == 0 ? null : precision;
+ }
+
+ public Integer getDecimalDigits() throws SQLException {
+ return columnScale(Utils.hiveTypeToSqlType(type));
+ }
+
+ public Integer getNumPrecRadix() {
+ if (type.equalsIgnoreCase("tinyint")) {
+ return 10;
+ } else if (type.equalsIgnoreCase("smallint")) {
+ return 10;
+ } else if (type.equalsIgnoreCase("int")) {
+ return 10;
+ } else if (type.equalsIgnoreCase("bigint")) {
+ return 10;
+ } else if (type.equalsIgnoreCase("float")) {
+ return 2;
+ } else if (type.equalsIgnoreCase("double")) {
+ return 2;
+ } else if (type.equalsIgnoreCase("decimal")) {
+ return 10;
+ } else { // anything else including boolean and string is null
+ return null;
+ }
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public int getOrdinalPos() {
+ return ordinalPos;
+ }
+}
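
A small sketch (in the same package, since the helpers are package-private) of how the display-size, precision, and scale tables above line up for one Hive type, using Utils.hiveTypeToSqlType from this same patch; illustration only:

package org.apache.hive.jdbc;

import java.sql.SQLException;

// Illustration: combine JdbcColumn's static mappings for a single Hive type.
public class ColumnMappingSketch {
  public static void main(String[] args) throws SQLException {
    int sqlType = Utils.hiveTypeToSqlType("double");            // java.sql.Types.DOUBLE
    System.out.println(JdbcColumn.columnDisplaySize(sqlType));  // 25, room for -x.xxx...e-####
    System.out.println(JdbcColumn.columnPrecision(sqlType));    // 15 significant digits
    System.out.println(JdbcColumn.columnScale(sqlType));        // 15
  }
}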
diff --git jdbc/src/java/org/apache/hive/jdbc/JdbcTable.java jdbc/src/java/org/apache/hive/jdbc/JdbcTable.java
new file mode 100644
index 0000000..b0cc53b
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/JdbcTable.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.sql.SQLException;
+
+
+/**
+ * Table metadata.
+ */
+public class JdbcTable {
+ private String tableCatalog;
+ private String tableName;
+ private String type;
+ private String comment;
+
+ public JdbcTable(String tableCatalog, String tableName, String type, String comment) {
+ this.tableCatalog = tableCatalog;
+ this.tableName = tableName;
+ this.type = type;
+ this.comment = comment;
+ }
+
+ public String getTableCatalog() {
+ return tableCatalog;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public String getSqlTableType() throws SQLException {
+ return HiveDatabaseMetaData.toJdbcTableType(type);
+ }
+
+ public String getComment() {
+ return comment;
+ }
+}
diff --git jdbc/src/java/org/apache/hive/jdbc/Utils.java jdbc/src/java/org/apache/hive/jdbc/Utils.java
new file mode 100644
index 0000000..433b2c5
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -0,0 +1,252 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import java.net.URI;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hive.service.cli.thrift.TStatus;
+import org.apache.hive.service.cli.thrift.TStatusCode;
+
+public class Utils {
+ /**
+ * The required prefix for the connection URL.
+ */
+ public static final String URL_PREFIX = "jdbc:hive2://";
+
+ /**
+ * If host is provided, without a port.
+ */
+ public static final String DEFAULT_PORT = "10000";
+
+ /**
+ * Hive's default database name
+ */
+ public static final String DEFAULT_DATABASE = "default";
+
+ private static final String URI_JDBC_PREFIX = "jdbc:";
+
+ public static class JdbcConnectionParams {
+ private String host = null;
+ private int port;
+ private String dbName = DEFAULT_DATABASE;
+ private Map<String, String> hiveConfs = new HashMap<String, String>();
+ private Map<String, String> hiveVars = new HashMap<String, String>();
+ private Map<String, String> sessionVars = new HashMap<String, String>();
+ private boolean isEmbeddedMode = false;
+
+ public JdbcConnectionParams() {
+ }
+
+ public String getHost() {
+ return host;
+ }
+ public int getPort() {
+ return port;
+ }
+ public String getDbName() {
+ return dbName;
+ }
+ public Map<String, String> getHiveConfs() {
+ return hiveConfs;
+ }
+ public Map<String, String> getHiveVars() {
+ return hiveVars;
+ }
+ public boolean isEmbeddedMode() {
+ return isEmbeddedMode;
+ }
+ public Map<String, String> getSessionVars() {
+ return sessionVars;
+ }
+
+ public void setHost(String host) {
+ this.host = host;
+ }
+ public void setPort(int port) {
+ this.port = port;
+ }
+ public void setDbName(String dbName) {
+ this.dbName = dbName;
+ }
+ public void setHiveConfs(Map<String, String> hiveConfs) {
+ this.hiveConfs = hiveConfs;
+ }
+ public void setHiveVars(Map<String, String> hiveVars) {
+ this.hiveVars = hiveVars;
+ }
+ public void setEmbeddedMode(boolean embeddedMode) {
+ this.isEmbeddedMode = embeddedMode;
+ }
+ public void setSessionVars(Map<String, String> sessionVars) {
+ this.sessionVars = sessionVars;
+ }
+ }
+
+
+ /**
+ * Convert hive types to sql types.
+ * @param type
+ * @return Integer java.sql.Types values
+ * @throws SQLException
+ */
+ public static int hiveTypeToSqlType(String type) throws SQLException {
+ if ("string".equalsIgnoreCase(type)) {
+ return Types.VARCHAR;
+ } else if ("float".equalsIgnoreCase(type)) {
+ return Types.FLOAT;
+ } else if ("double".equalsIgnoreCase(type)) {
+ return Types.DOUBLE;
+ } else if ("boolean".equalsIgnoreCase(type)) {
+ return Types.BOOLEAN;
+ } else if ("tinyint".equalsIgnoreCase(type)) {
+ return Types.TINYINT;
+ } else if ("smallint".equalsIgnoreCase(type)) {
+ return Types.SMALLINT;
+ } else if ("int".equalsIgnoreCase(type)) {
+ return Types.INTEGER;
+ } else if ("bigint".equalsIgnoreCase(type)) {
+ return Types.BIGINT;
+ } else if ("timestamp".equalsIgnoreCase(type)) {
+ return Types.TIMESTAMP;
+ } else if ("decimal".equalsIgnoreCase(type)) {
+ return Types.DECIMAL;
+ } else if (type.startsWith("map<")) {
+ return Types.VARCHAR;
+ } else if (type.startsWith("array<")) {
+ return Types.VARCHAR;
+ } else if (type.startsWith("struct<")) {
+ return Types.VARCHAR;
+ }
+ throw new SQLException("Unrecognized column type: " + type);
+ }
+
+ // Verify success or success_with_info status, else throw SQLException
+ public static void verifySuccessWithInfo(TStatus status) throws SQLException {
+ verifySuccess(status, true);
+ }
+
+ // Verify success status, else throw SQLException
+ public static void verifySuccess(TStatus status) throws SQLException {
+ verifySuccess(status, false);
+ }
+
+ // Verify success and optionally with_info status, else throw SQLException
+ public static void verifySuccess(TStatus status, boolean withInfo) throws SQLException {
+ if ((status.getStatusCode() != TStatusCode.SUCCESS_STATUS) &&
+ !(withInfo && (status.getStatusCode() == TStatusCode.SUCCESS_WITH_INFO_STATUS))) {
+ throw new SQLException(status.getErrorMessage(),
+ status.getSqlState(), status.getErrorCode());
+ }
+ }
+
+ /**
+ * Parse JDBC connection URL
+ * The new format of the URL is jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
+ * where the optional sess, conf and var lists are semicolon separated <key>=<value> pairs. As before, if the
+ * host/port is not specified, the driver runs an embedded hive.
+ * examples -
+ * jdbc:hive2://ubuntu:11000/db2?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
+ * jdbc:hive2://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
+ * jdbc:hive2://ubuntu:11000/db2;user=foo;password=bar
+ *
+ * Note that currently the session properties are not used.
+ *
+ * @param uri
+ * @return
+ */
+ public static JdbcConnectionParams parseURL(String uri) throws IllegalArgumentException {
+ JdbcConnectionParams connParams = new JdbcConnectionParams();
+
+ if (!uri.startsWith(URL_PREFIX)) {
+ throw new IllegalArgumentException("Bad URL format");
+ }
+
+ // Don't parse URL with no other configuration.
+ if (uri.equalsIgnoreCase(URL_PREFIX)) {
+ connParams.setEmbeddedMode(true);
+ return connParams;
+ }
+ URI jdbcURI = URI.create(uri.substring(URI_JDBC_PREFIX.length()));
+
+ connParams.setHost(jdbcURI.getHost());
+ if (connParams.getHost() == null) {
+ connParams.setEmbeddedMode(true);
+ } else {
+ int port = jdbcURI.getPort();
+ if (port == -1) {
+ port = Integer.valueOf(DEFAULT_PORT);
+ }
+ connParams.setPort(port);
+ }
+
+ // key=value pattern
+ Pattern pattern = Pattern.compile("([^;]*)=([^;]*)[;]?");
+
+ // dbname and session settings
+ String sessVars = jdbcURI.getPath();
+ if ((sessVars == null) || sessVars.isEmpty()) {
+ connParams.setDbName(DEFAULT_DATABASE);
+ } else {
+ // removing leading '/' returned by getPath()
+ sessVars = sessVars.substring(1);
+ if (!sessVars.contains(";")) {
+ // only dbname is provided
+ connParams.setDbName(sessVars);
+ } else {
+ // we have dbname followed by session parameters
+ connParams.setDbName(sessVars.substring(0, sessVars.indexOf(';')));
+ sessVars = sessVars.substring(sessVars.indexOf(';')+1);
+ if (sessVars != null) {
+ Matcher sessMatcher = pattern.matcher(sessVars);
+ while (sessMatcher.find()) {
+ connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2));
+ }
+ }
+ }
+ }
+
+ // parse hive conf settings
+ String confStr = jdbcURI.getQuery();
+ if (confStr != null) {
+ Matcher confMatcher = pattern.matcher(confStr);
+ while (confMatcher.find()) {
+ connParams.getHiveConfs().put(confMatcher.group(1), confMatcher.group(2));
+ }
+ }
+
+ // parse hive var settings
+ String varStr = jdbcURI.getFragment();
+ if (varStr != null) {
+ Matcher varMatcher = pattern.matcher(varStr);
+ while (varMatcher.find()) {
+ connParams.getHiveVars().put(varMatcher.group(1), varMatcher.group(2));
+ }
+ }
+
+ return connParams;
+ }
+
+
+}
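
A short sketch exercising the URL grammar documented in parseURL above; the host, port, and key/value pairs are taken from the javadoc's own examples:

package org.apache.hive.jdbc;

// Illustration: parse one of the documented URL forms and read the parts back.
public class ParseUrlSketch {
  public static void main(String[] args) {
    Utils.JdbcConnectionParams p = Utils.parseURL(
        "jdbc:hive2://ubuntu:11000/db2;user=foo?hive.cli.conf.printheader=true#stab=salesTable");
    System.out.println(p.getHost());        // ubuntu
    System.out.println(p.getPort());        // 11000
    System.out.println(p.getDbName());      // db2
    System.out.println(p.getSessionVars()); // {user=foo}
    System.out.println(p.getHiveConfs());   // {hive.cli.conf.printheader=true}
    System.out.println(p.getHiveVars());    // {stab=salesTable}
    // "jdbc:hive2://" with no host puts the driver in embedded mode instead.
  }
}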
diff --git jdbc/src/java/org/apache/hive/jdbc/beeline/HiveBeeline.java jdbc/src/java/org/apache/hive/jdbc/beeline/HiveBeeline.java
deleted file mode 100644
index 300b040..0000000
--- jdbc/src/java/org/apache/hive/jdbc/beeline/HiveBeeline.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.jdbc.beeline;
-
-import java.io.ByteArrayInputStream;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hive.jdbc.beeline.OptionsProcessor.PrintMode;
-import sqlline.SqlLine;
-
-public class HiveBeeline {
-
- // TODO: expose from the JDBC connection class
- private static final String URI_PREFIX = "jdbc:hive://";
- private static final String SQLLINE_CLASS = "sqlline.SqlLine";
- private static final String HIVE_JDBC_DRIVER = "org.apache.hadoop.hive.jdbc.HiveDriver";
- private static final String SQLLINE_SILENT = "--silent=true";
- private static final String SQLLINE_VERBOSE = "--verbose=true";
- private static final String SQLLINE_SCRIPT_CMD = "!run";
- private static final String URL_DB_MARKER = "/";
- private static final String URL_HIVE_CONF_MARKER = "?";
- private static final String URL_HIVE_VAR_MARKER = "#";
- private static final String URL_SESS_VAR_MARKER = ";";
-
- public static void main(String[] args) throws Exception {
- OptionsProcessor oproc = new OptionsProcessor();
- if (!oproc.processArgs(args)) {
- System.exit(1);
- }
-
- // assemble connection URL
- String jdbcURL = URI_PREFIX;
- if (oproc.getHost() != null) {
- // no, host name indicates an embbeded hive invocation
- jdbcURL += oproc.getHost() + ":" + oproc.getPort();
- }
-
- if (!oproc.getDatabase().isEmpty()) {
- jdbcURL += URL_DB_MARKER + oproc.getDatabase();
- }
- if (!oproc.getSessVars().isEmpty()) {
- jdbcURL += URL_SESS_VAR_MARKER + oproc.getSessVars();
- }
- if (!oproc.getHiveConfs().isEmpty()) {
- jdbcURL += URL_HIVE_CONF_MARKER + oproc.getHiveConfs();
- }
- if (!oproc.getHiveVars().isEmpty()) {
- jdbcURL += URL_HIVE_VAR_MARKER + oproc.getHiveVars();
- }
-
- // setup input file or string
- InputStream sqlLineInput = null;
- if (oproc.getFileName() != null) {
- String scriptCmd = SQLLINE_SCRIPT_CMD + " " + oproc.getFileName().trim() + "\n";
- sqlLineInput = new ByteArrayInputStream(scriptCmd.getBytes());
- } else if (oproc.getExecString() != null) {
- // process the string to make each stmt a separate line
- String execString = oproc.getExecString().trim();
- String execCommand = "";
- String command = "";
- for (String oneCmd : execString.split(";")) {
- if (StringUtils.endsWith(oneCmd, "\\")) {
- command += StringUtils.chop(oneCmd) + ";";
- continue;
- } else {
- command += oneCmd;
- }
- if (StringUtils.isBlank(command)) {
- continue;
- }
- execCommand += command + ";\n"; // stmt should end with ';' for sqlLine
- command = "";
- }
- sqlLineInput = new ByteArrayInputStream(execCommand.getBytes());
- }
-
- // setup SQLLine args
- List<String> argList = new ArrayList<String>();
- argList.add("-u");
- argList.add(jdbcURL);
- argList.add("-d");
- argList.add(HIVE_JDBC_DRIVER); // TODO: make it configurable for HS or HS2
- if (oproc.getpMode() == PrintMode.SILENT) {
- argList.add(SQLLINE_SILENT);
- } else if (oproc.getpMode() == PrintMode.VERBOSE) {
- argList.add(SQLLINE_VERBOSE);
- }
-
- // Invoke sqlline
- SqlLine.mainWithInputRedirection(argList.toArray(new String[0]), sqlLineInput);
- }
-}
diff --git jdbc/src/java/org/apache/hive/jdbc/beeline/OptionsProcessor.java jdbc/src/java/org/apache/hive/jdbc/beeline/OptionsProcessor.java
deleted file mode 100644
index c86a51d..0000000
--- jdbc/src/java/org/apache/hive/jdbc/beeline/OptionsProcessor.java
+++ /dev/null
@@ -1,266 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.jdbc.beeline;
-
-import java.util.Properties;
-
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * OptionsProcessor.
- *
- */
-public class OptionsProcessor {
- enum PrintMode {
- SILENT,
- NORMAL,
- VERBOSE
- };
-
- private static final Log l4j = LogFactory.getLog(OptionsProcessor.class.getName());
- private final Options options = new Options();
- private org.apache.commons.cli.CommandLine commandLine;
-
- /**
- * -database option if any that the session has been invoked with.
- */
- private String database;
-
- /**
- * -e option if any that the session has been invoked with.
- */
- private String execString;
-
- /**
- * -f option if any that the session has been invoked with.
- */
- private String fileName;
-
- /**
- * properties set from -hiveconf via cmdline.
- */
- private final Properties cmdProperties = new Properties();
-
- /**
- * host name and port number of remote Hive server
- */
- private String host;
- private int port;
-
- /**
- * print mode
- */
- private PrintMode pMode = PrintMode.NORMAL;
-
- /**
- * hive var properties
- */
- private String hiveVars;
-
- /**
- * hive conf properties
- */
- private String hiveConfs;
-
- /**
- * hive session properties
- */
- private String sessVars;
-
- @SuppressWarnings("static-access")
- public OptionsProcessor() {
-
- // -database database
- options.addOption(OptionBuilder
- .hasArg()
- .withArgName("databasename")
- .withLongOpt("database")
- .withDescription("Specify the database to use")
- .create());
-
- // -e 'quoted-query-string'
- options.addOption(OptionBuilder
- .hasArg()
- .withArgName("quoted-query-string")
- .withDescription("SQL from command line")
- .create('e'));
-
- // -f
- options.addOption(OptionBuilder
- .hasArg()
- .withArgName("filename")
- .withDescription("SQL from files")
- .create('f'));
-
- // -hiveconf x=y
- options.addOption(OptionBuilder
- .withValueSeparator()
- .hasArgs(2)
- .withArgName("property=value")
- .withLongOpt("hiveconf")
- .withDescription("Use value for given property")
- .create());
-
- // -sessVar x=y
- options.addOption(OptionBuilder
- .withValueSeparator()
- .hasArgs(2)
- .withArgName("property=value")
- .withLongOpt("sessVar")
- .withDescription("Use value for given property")
- .create());
-
- // -h hostname/ippaddress
- options.addOption(OptionBuilder
- .hasArg()
- .withArgName("hostname")
- .withDescription("connecting to Hive Server on remote host")
- .create('h'));
-
- // -p port
- options.addOption(OptionBuilder
- .hasArg()
- .withArgName("port")
- .withDescription("connecting to Hive Server on port number")
- .create('p'));
-
- // Substitution option -d, --define
- options.addOption(OptionBuilder
- .withValueSeparator()
- .hasArgs(2)
- .withArgName("key=value")
- .withLongOpt("define")
- .withDescription("Variable subsitution to apply to hive commands. e.g. -d A=B or --define A=B")
- .create('d'));
-
- // Substitution option --hivevar
- options.addOption(OptionBuilder
- .withValueSeparator()
- .hasArgs(2)
- .withArgName("key=value")
- .withLongOpt("hivevar")
- .withDescription("Variable subsitution to apply to hive commands. e.g. --hivevar A=B")
- .create());
-
- // [-S|--silent]
- options.addOption(new Option("S", "silent", false, "Silent mode in interactive shell"));
-
- // [-v|--verbose]
- options.addOption(new Option("v", "verbose", false, "Verbose mode (echo executed SQL to the console)"));
-
- // [-H|--help]
- options.addOption(new Option("H", "help", false, "Print help information"));
- }
-
- public String getDatabase() {
- return database;
- }
-
- public String getExecString() {
- return execString;
- }
-
- public String getFileName() {
- return fileName;
- }
-
- public String getHost() {
- return host;
- }
-
- public int getPort() {
- return port;
- }
-
- public PrintMode getpMode() {
- return pMode;
- }
-
- public String getHiveVars() {
- return hiveVars;
- }
-
- public String getHiveConfs() {
- return hiveConfs;
- }
-
- public String getSessVars() {
- return sessVars;
- }
-
- public boolean processArgs(String[] argv) {
- try {
- commandLine = new GnuParser().parse(options, argv);
- } catch (ParseException e) {
- System.err.println(e.getMessage());
- printUsage();
- return false;
- }
-
- if (commandLine.hasOption('H')) {
- printUsage();
- return false;
- }
-
- if (commandLine.hasOption('S')) {
- pMode = PrintMode.SILENT;
- } else if (commandLine.hasOption('v')) {
- pMode = PrintMode.VERBOSE;
- } else {
- pMode = PrintMode.NORMAL;
- }
-
- hiveConfs = commandLine.getOptionValue("hiveconf", "");
- hiveVars = commandLine.getOptionValue("define", "");
- hiveVars += commandLine.getOptionValue("hivevar", "");
- sessVars = commandLine.getOptionValue("sessvar", "");
- database = commandLine.getOptionValue("database", "");
- execString = commandLine.getOptionValue('e');
- fileName = commandLine.getOptionValue('f');
- host = (String) commandLine.getOptionValue('h');
- port = Integer.parseInt((String) commandLine.getOptionValue('p', "10000"));
-
- if (execString != null && fileName != null) {
- System.err.println("The '-e' and '-f' options cannot be specified simultaneously");
- printUsage();
- return false;
- }
-
- if (commandLine.hasOption("hiveconf")) {
- Properties confProps = commandLine.getOptionProperties("hiveconf");
- for (String propKey : confProps.stringPropertyNames()) {
- cmdProperties.setProperty(propKey, confProps.getProperty(propKey));
- }
- }
-
- return true;
- }
-
- private void printUsage() {
- new HelpFormatter().printHelp("beeline", options);
- }
-
-}
diff --git jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
new file mode 100644
index 0000000..2e26ce0
--- /dev/null
+++ jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -0,0 +1,1255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
+import static org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.DriverManager;
+import java.sql.DriverPropertyInfo;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * TestJdbcDriver2
+ *
+ */
+public class TestJdbcDriver2 extends TestCase {
+ private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+ private static final String tableName = "testHiveJdbcDriver_Table";
+ private static final String tableComment = "Simple table";
+ private static final String viewName = "testHiveJdbcDriverView";
+ private static final String viewComment = "Simple view";
+ private static final String partitionedTableName = "testHiveJdbcDriverPartitionedTable";
+ private static final String partitionedColumnName = "partcolabc";
+ private static final String partitionedColumnValue = "20090619";
+ private static final String partitionedTableComment = "Partitioned table";
+ private static final String dataTypeTableName = "testDataTypeTable";
+ private static final String dataTypeTableComment = "Table with many column data types";
+ private final HiveConf conf;
+ private final Path dataFilePath;
+ private final Path dataTypeDataFilePath;
+ private Connection con;
+ private boolean standAloneServer = false;
+
+ public TestJdbcDriver2(String name) {
+ super(name);
+ conf = new HiveConf(TestJdbcDriver2.class);
+ String dataFileDir = conf.get("test.data.files").replace('\\', '/')
+ .replace("c:", "");
+ dataFilePath = new Path(dataFileDir, "kv1.txt");
+ dataTypeDataFilePath = new Path(dataFileDir, "datatypes.txt");
+ standAloneServer = "true".equals(System
+ .getProperty("test.service.standalone.server"));
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ Class.forName(driverName);
+ if (standAloneServer) {
+ // get connection
+ con = DriverManager.getConnection("jdbc:hive2://localhost:10000/default",
+ "", "");
+ } else {
+ con = DriverManager.getConnection("jdbc:hive2://", "", "");
+ }
+ assertNotNull("Connection is null", con);
+ assertFalse("Connection should not be closed", con.isClosed());
+ Statement stmt = con.createStatement();
+ assertNotNull("Statement is null", stmt);
+
+ stmt.execute("set hive.support.concurrency = false");
+
+ // drop table first; any error fails the test.
+ try {
+ stmt.execute("drop table " + tableName);
+ } catch (Exception ex) {
+ fail(ex.toString());
+ }
+
+ ResultSet res;
+ // create table
+ stmt.execute("create table " + tableName
+ + " (under_col int comment 'the under column', value string) comment '"
+ + tableComment + "'");
+
+ // load data
+ stmt.execute("load data local inpath '"
+ + dataFilePath.toString() + "' into table " + tableName);
+
+ // also initialize a partitioned table to test against.
+
+ // drop table first; any error fails the test.
+ try {
+ stmt.execute("drop table " + partitionedTableName);
+ } catch (Exception ex) {
+ fail(ex.toString());
+ }
+
+ stmt.execute("create table " + partitionedTableName
+ + " (under_col int, value string) comment '"+partitionedTableComment
+ +"' partitioned by (" + partitionedColumnName + " STRING)");
+
+ // load data
+ stmt.execute("load data local inpath '"
+ + dataFilePath.toString() + "' into table " + partitionedTableName
+ + " PARTITION (" + partitionedColumnName + "="
+ + partitionedColumnValue + ")");
+
+ // drop table first; any error fails the test.
+ try {
+ stmt.execute("drop table " + dataTypeTableName);
+ } catch (Exception ex) {
+ fail(ex.toString());
+ }
+
+ stmt.execute("create table " + dataTypeTableName
+ + " (c1 int, c2 boolean, c3 double, c4 string,"
+ + " c5 array, c6 map, c7 map,"
+ + " c8 struct,"
+ + " c9 tinyint, c10 smallint, c11 float, c12 bigint,"
+ + " c13 array>,"
+ + " c14 map>,"
+ + " c15 struct>,"
+ + " c16 array,n:int>>,"
+ + " c17 timestamp, "
+ + " c18 decimal) comment'" + dataTypeTableComment
+ +"' partitioned by (dt STRING)");
+
+ stmt.execute("load data local inpath '"
+ + dataTypeDataFilePath.toString() + "' into table " + dataTypeTableName
+ + " PARTITION (dt='20090619')");
+
+ // drop view first; any error fails the test.
+ try {
+ stmt.execute("drop view " + viewName);
+ } catch (Exception ex) {
+ fail(ex.toString());
+ }
+
+ // create view
+ stmt.execute("create view " + viewName + " comment '"+viewComment
+ +"' as select * from "+ tableName);
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+
+ // drop table
+ Statement stmt = con.createStatement();
+ assertNotNull("Statement is null", stmt);
+ stmt.execute("drop table " + tableName);
+ stmt.execute("drop table " + partitionedTableName);
+ stmt.execute("drop table " + dataTypeTableName);
+
+ con.close();
+ assertTrue("Connection should be closed", con.isClosed());
+
+ Exception expectedException = null;
+ try {
+ con.createStatement();
+ } catch (Exception e) {
+ expectedException = e;
+ }
+
+ assertNotNull(
+ "createStatement() on closed connection should throw exception",
+ expectedException);
+ }
+
+ public void testDataTypes2() throws Exception {
+ Statement stmt = con.createStatement();
+
+ ResultSet res = stmt.executeQuery(
+ "select c5, c1 from " + dataTypeTableName + " order by c1");
+ ResultSetMetaData meta = res.getMetaData();
+
+ // row 1
+ assertTrue(res.next());
+ // skip the last (partitioning) column since it is always non-null
+ for (int i = 1; i < meta.getColumnCount(); i++) {
+ assertNull(res.getObject(i));
+ }
+
+ }
+ public void testErrorDiag() throws SQLException {
+ Statement stmt = con.createStatement();
+
+ // verify syntax error
+ try {
+ ResultSet res = stmt.executeQuery("select from " + dataTypeTableName);
+ } catch (SQLException e) {
+ assertEquals("42000", e.getSQLState());
+ }
+
+ // verify table not found error
+ try {
+ ResultSet res = stmt.executeQuery("select * from nonTable");
+ } catch (SQLException e) {
+ assertEquals("42S02", e.getSQLState());
+ }
+
+ // verify invalid column error
+ try {
+ ResultSet res = stmt.executeQuery("select zzzz from " + dataTypeTableName);
+ } catch (SQLException e) {
+ assertEquals("42000", e.getSQLState());
+ }
+
+ }
+
+ /**
+ * verify 'explain ...' resultset
+ * @throws SQLException
+ */
+ public void testExplainStmt() throws SQLException {
+ Statement stmt = con.createStatement();
+
+ ResultSet res = stmt.executeQuery(
+ "explain select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
+ "c1*2, sentences(null, null, null) as b from " + dataTypeTableName + " limit 1");
+
+ ResultSetMetaData md = res.getMetaData();
+ assertEquals(md.getColumnCount(), 1); // only one result column
+ assertEquals(md.getColumnLabel(1), EXPL_COLUMN_NAME); // verify the column name
+ //verify that there is data in the resultset
+ assertTrue("Nothing returned explain", res.next());
+ }
+
+ public void testPrepareStatement() {
+
+ String sql = "from (select count(1) from "
+ + tableName
+ + " where 'not?param?not?param' <> 'not_param??not_param' and ?=? "
+ + " and 1=? and 2=? and 3.0=? and 4.0=? and 'test\\'string\"'=? and 5=? and ?=? "
+ + " ) t select '2011-03-25' ddate,'China',true bv, 10 num limit 10";
+
+ ///////////////////////////////////////////////
+ //////////////////// correct testcase
+ //////////////////////////////////////////////
+ try {
+ PreparedStatement ps = con.prepareStatement(sql);
+
+ ps.setBoolean(1, true);
+ ps.setBoolean(2, true);
+
+ ps.setShort(3, Short.valueOf("1"));
+ ps.setInt(4, 2);
+ ps.setFloat(5, 3f);
+ ps.setDouble(6, Double.valueOf(4));
+ ps.setString(7, "test'string\"");
+ ps.setLong(8, 5L);
+ ps.setByte(9, (byte) 1);
+ ps.setByte(10, (byte) 1);
+
+ ps.setMaxRows(2);
+
+ assertTrue(true);
+
+ ResultSet res = ps.executeQuery();
+ assertNotNull(res);
+
+ while (res.next()) {
+ assertEquals("2011-03-25", res.getString("ddate"));
+ assertEquals("10", res.getString("num"));
+ assertEquals((byte) 10, res.getByte("num"));
+ assertEquals("2011-03-25", res.getDate("ddate").toString());
+ assertEquals(Double.valueOf(10).doubleValue(), res.getDouble("num"), 0.1);
+ assertEquals(10, res.getInt("num"));
+ assertEquals(Short.valueOf("10").shortValue(), res.getShort("num"));
+ assertEquals(10L, res.getLong("num"));
+ assertEquals(true, res.getBoolean("bv"));
+ Object o = res.getObject("ddate");
+ assertNotNull(o);
+ o = res.getObject("num");
+ assertNotNull(o);
+ }
+ res.close();
+ assertTrue(true);
+
+ ps.close();
+ assertTrue(true);
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail(e.toString());
+ }
+
+ ///////////////////////////////////////////////
+ //////////////////// other failure testcases
+ //////////////////////////////////////////////
+ // set nothing for prepared sql
+ Exception expectedException = null;
+ try {
+ PreparedStatement ps = con.prepareStatement(sql);
+ ps.executeQuery();
+ } catch (Exception e) {
+ expectedException = e;
+ }
+ assertNotNull(
+ "Execute the un-setted sql statement should throw exception",
+ expectedException);
+
+ // set some of parameters for prepared sql, not all of them.
+ expectedException = null;
+ try {
+ PreparedStatement ps = con.prepareStatement(sql);
+ ps.setBoolean(1, true);
+ ps.setBoolean(2, true);
+ ps.executeQuery();
+ } catch (Exception e) {
+ expectedException = e;
+ }
+ assertNotNull(
+ "Execute the invalid setted sql statement should throw exception",
+ expectedException);
+
+ // set the wrong type parameters for prepared sql.
+ expectedException = null;
+ try {
+ PreparedStatement ps = con.prepareStatement(sql);
+
+ // wrong type here
+ ps.setString(1, "wrong");
+
+ assertTrue(true);
+ ResultSet res = ps.executeQuery();
+ if (!res.next()) {
+ throw new Exception("there must be a empty result set");
+ }
+ } catch (Exception e) {
+ expectedException = e;
+ }
+ assertNotNull(
+ "Execute the invalid setted sql statement should throw exception",
+ expectedException);
+ }
+
+ public final void testSelectAll() throws Exception {
+ doTestSelectAll(tableName, -1, -1); // tests not setting maxRows (return all)
+ doTestSelectAll(tableName, 0, -1); // tests setting maxRows to 0 (return all)
+ }
+
+ public final void testSelectAllPartioned() throws Exception {
+ doTestSelectAll(partitionedTableName, -1, -1); // tests not setting maxRows
+ // (return all)
+ doTestSelectAll(partitionedTableName, 0, -1); // tests setting maxRows to 0
+ // (return all)
+ }
+
+ public final void testSelectAllMaxRows() throws Exception {
+ doTestSelectAll(tableName, 100, -1);
+ }
+
+ public final void testSelectAllFetchSize() throws Exception {
+ doTestSelectAll(tableName, 100, 20);
+ }
+
+ public void testDataTypes() throws Exception {
+ Statement stmt = con.createStatement();
+
+ ResultSet res = stmt.executeQuery(
+ "select * from " + dataTypeTableName + " order by c1");
+ ResultSetMetaData meta = res.getMetaData();
+
+ // row 1
+ assertTrue(res.next());
+ // skip the last (partitioning) column since it is always non-null
+ for (int i = 1; i < meta.getColumnCount(); i++) {
+ assertNull(res.getObject(i));
+ }
+ // getXXX returns 0 for numeric types, false for boolean and null for other
+ assertEquals(0, res.getInt(1));
+ assertEquals(false, res.getBoolean(2));
+ assertEquals(0d, res.getDouble(3));
+ assertEquals(null, res.getString(4));
+ assertEquals(null, res.getString(5));
+ assertEquals(null, res.getString(6));
+ assertEquals(null, res.getString(7));
+ assertEquals(null, res.getString(8));
+ assertEquals(0, res.getByte(9));
+ assertEquals(0, res.getShort(10));
+ assertEquals(0f, res.getFloat(11));
+ assertEquals(0L, res.getLong(12));
+ assertEquals(null, res.getString(13));
+ assertEquals(null, res.getString(14));
+ assertEquals(null, res.getString(15));
+ assertEquals(null, res.getString(16));
+
+ // row 2
+ assertTrue(res.next());
+ assertEquals(-1, res.getInt(1));
+ assertEquals(false, res.getBoolean(2));
+ assertEquals(-1.1d, res.getDouble(3));
+ assertEquals("", res.getString(4));
+ assertEquals("[]", res.getString(5));
+ assertEquals("{}", res.getString(6));
+ assertEquals("{}", res.getString(7));
+ assertEquals("[null, null, null]", res.getString(8));
+ assertEquals(-1, res.getByte(9));
+ assertEquals(-1, res.getShort(10));
+ assertEquals(-1.0f, res.getFloat(11));
+ assertEquals(-1, res.getLong(12));
+ assertEquals("[]", res.getString(13));
+ assertEquals("{}", res.getString(14));
+ assertEquals("[null, null]", res.getString(15));
+ assertEquals("[]", res.getString(16));
+ assertEquals(null, res.getString(17));
+ assertEquals(null, res.getTimestamp(17));
+ assertEquals(null, res.getBigDecimal(18));
+
+ // row 3
+ assertTrue(res.next());
+ assertEquals(1, res.getInt(1));
+ assertEquals(true, res.getBoolean(2));
+ assertEquals(1.1d, res.getDouble(3));
+ assertEquals("1", res.getString(4));
+ assertEquals("[1, 2]", res.getString(5));
+ assertEquals("{1=x, 2=y}", res.getString(6));
+ assertEquals("{k=v}", res.getString(7));
+ assertEquals("[a, 9, 2.2]", res.getString(8));
+ assertEquals(1, res.getByte(9));
+ assertEquals(1, res.getShort(10));
+ assertEquals(1.0f, res.getFloat(11));
+ assertEquals(1, res.getLong(12));
+ assertEquals("[[a, b], [c, d]]", res.getString(13));
+ assertEquals("{1={11=12, 13=14}, 2={21=22}}", res.getString(14));
+ assertEquals("[1, [2, x]]", res.getString(15));
+ assertEquals("[[{}, 1], [{c=d, a=b}, 2]]", res.getString(16));
+ assertEquals("2012-04-22 09:00:00.123456789", res.getString(17));
+ assertEquals("2012-04-22 09:00:00.123456789", res.getTimestamp(17).toString());
+ assertEquals("123456789.0123456", res.getBigDecimal(18).toString());
+
+ // test getBoolean rules on non-boolean columns
+ assertEquals(true, res.getBoolean(1));
+ assertEquals(true, res.getBoolean(4));
+
+ // no more rows
+ assertFalse(res.next());
+ }
+
+ private void doTestSelectAll(String tableName, int maxRows, int fetchSize) throws Exception {
+ boolean isPartitionTable = tableName.equals(partitionedTableName);
+
+ Statement stmt = con.createStatement();
+ if (maxRows >= 0) {
+ stmt.setMaxRows(maxRows);
+ }
+ if (fetchSize > 0) {
+ stmt.setFetchSize(fetchSize);
+ assertEquals(fetchSize, stmt.getFetchSize());
+ }
+
+ // JDBC says that 0 means return all, which is the default
+ int expectedMaxRows = maxRows < 1 ? 0 : maxRows;
+
+ assertNotNull("Statement is null", stmt);
+ assertEquals("Statement max rows not as expected", expectedMaxRows, stmt
+ .getMaxRows());
+ assertFalse("Statement should not be closed", stmt.isClosed());
+
+ ResultSet res;
+
+ // run some queries
+ res = stmt.executeQuery("select * from " + tableName);
+ assertNotNull("ResultSet is null", res);
+ assertTrue("getResultSet() not returning expected ResultSet", res == stmt
+ .getResultSet());
+ assertEquals("get update count not as expected", 0, stmt.getUpdateCount());
+ int i = 0;
+
+ ResultSetMetaData meta = res.getMetaData();
+ int expectedColCount = isPartitionTable ? 3 : 2;
+ assertEquals(
+ "Unexpected column count", expectedColCount, meta.getColumnCount());
+
+ boolean moreRow = res.next();
+ while (moreRow) {
+ try {
+ i++;
+ assertEquals(res.getInt(1), res.getInt("under_col"));
+ assertEquals(res.getString(1), res.getString("under_col"));
+ assertEquals(res.getString(2), res.getString("value"));
+ if (isPartitionTable) {
+ assertEquals(res.getString(3), partitionedColumnValue);
+ assertEquals(res.getString(3), res.getString(partitionedColumnName));
+ }
+ assertFalse("Last result value was not null", res.wasNull());
+ assertNull("No warnings should be found on ResultSet", res
+ .getWarnings());
+ res.clearWarnings(); // verifying that method is supported
+
+ // System.out.println(res.getString(1) + " " + res.getString(2));
+ assertEquals(
+ "getInt and getString don't align for the same result value",
+ String.valueOf(res.getInt(1)), res.getString(1));
+ assertEquals("Unexpected result found", "val_" + res.getString(1), res
+ .getString(2));
+ moreRow = res.next();
+ } catch (SQLException e) {
+ System.out.println(e.toString());
+ e.printStackTrace();
+ throw new Exception(e.toString());
+ }
+ }
+
+ // supposed to get 500 rows if maxRows isn't set
+ int expectedRowCount = maxRows > 0 ? maxRows : 500;
+ assertEquals("Incorrect number of rows returned", expectedRowCount, i);
+
+ // should have no more rows
+ assertEquals(false, moreRow);
+
+ assertNull("No warnings should be found on statement", stmt.getWarnings());
+ stmt.clearWarnings(); // verifying that method is supported
+
+ assertNull("No warnings should be found on connection", con.getWarnings());
+ con.clearWarnings(); // verifying that method is supported
+
+ stmt.close();
+ assertTrue("Statement should be closed", stmt.isClosed());
+ }
+
+ public void testErrorMessages() throws SQLException {
+ String invalidSyntaxSQLState = "42000";
+
+ // These tests inherently cause exceptions to be written to the test output
+ // logs. This is undesirable, since it might appear to someone looking
+ // at the test output logs as if something is failing when it isn't.
+ // Not sure how to get around that.
+ doTestErrorCase("SELECTT * FROM " + tableName,
+ "cannot recognize input near 'SELECTT' '*' 'FROM'",
+ invalidSyntaxSQLState, 40000);
+ doTestErrorCase("SELECT * FROM some_table_that_does_not_exist",
+ "Table not found", "42S02", 10001);
+ doTestErrorCase("drop table some_table_that_does_not_exist",
+ "Table not found", "42S02", 10001);
+ doTestErrorCase("SELECT invalid_column FROM " + tableName,
+ "Invalid table alias or column reference", invalidSyntaxSQLState, 10004);
+ doTestErrorCase("SELECT invalid_function(under_col) FROM " + tableName,
+ "Invalid function", invalidSyntaxSQLState, 10011);
+
+ // TODO: execute errors like this currently don't return good error
+ // codes and messages. This should be fixed.
+ doTestErrorCase(
+ "create table " + tableName + " (key int, value string)",
+ "FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask",
+ "08S01", 1);
+ }
+
+ private void doTestErrorCase(String sql, String expectedMessage,
+ String expectedSQLState, int expectedErrorCode) throws SQLException {
+ Statement stmt = con.createStatement();
+ boolean exceptionFound = false;
+ try {
+ stmt.execute(sql);
+ } catch (SQLException e) {
+ assertTrue("Adequate error messaging not found for '" + sql + "': "
+ + e.getMessage(), e.getMessage().contains(expectedMessage));
+ assertEquals("Expected SQLState not found for '" + sql + "'",
+ expectedSQLState, e.getSQLState());
+ assertEquals("Expected error code not found for '" + sql + "'",
+ expectedErrorCode, e.getErrorCode());
+ exceptionFound = true;
+ }
+
+ assertNotNull("Exception should have been thrown for query: " + sql,
+ exceptionFound);
+ }
+
+ public void testShowTables() throws SQLException {
+ Statement stmt = con.createStatement();
+ assertNotNull("Statement is null", stmt);
+
+ ResultSet res = stmt.executeQuery("show tables");
+
+ boolean testTableExists = false;
+ while (res.next()) {
+ assertNotNull("table name is null in result set", res.getString(1));
+ if (tableName.equalsIgnoreCase(res.getString(1))) {
+ testTableExists = true;
+ }
+ }
+
+ assertTrue("table name " + tableName
+ + " not found in SHOW TABLES result set", testTableExists);
+ }
+
+ public void testMetaDataGetTables() throws SQLException {
+ Map<String, Object[]> tests = new HashMap<String, Object[]>();
+ tests.put("test%jdbc%", new Object[]{"testhivejdbcdriver_table"
+ , "testhivejdbcdriverpartitionedtable"
+ , "testhivejdbcdriverview"});
+ tests.put("%jdbcdriver\\_table", new Object[]{"testhivejdbcdriver_table"});
+ tests.put("testhivejdbcdriver\\_table", new Object[]{"testhivejdbcdriver_table"});
+ tests.put("test_ivejdbcdri_er\\_table", new Object[]{"testhivejdbcdriver_table"});
+ tests.put("test_ivejdbcdri_er_table", new Object[]{"testhivejdbcdriver_table"});
+ tests.put("test_ivejdbcdri_er%table", new Object[]{
+ "testhivejdbcdriver_table", "testhivejdbcdriverpartitionedtable" });
+ tests.put("%jdbc%", new Object[]{ "testhivejdbcdriver_table"
+ , "testhivejdbcdriverpartitionedtable"
+ , "testhivejdbcdriverview"});
+ tests.put("", new Object[]{});
+
+ for (String checkPattern: tests.keySet()) {
+ ResultSet rs = (ResultSet)con.getMetaData().getTables("default", null, checkPattern, null);
+ int cnt = 0;
+ while (rs.next()) {
+ String resultTableName = rs.getString("TABLE_NAME");
+ assertEquals("Get by index different from get by name.", rs.getString(3), resultTableName);
+ assertEquals("Excpected a different table.", tests.get(checkPattern)[cnt], resultTableName);
+ String resultTableComment = rs.getString("REMARKS");
+ assertTrue("Missing comment on the table.", resultTableComment.length()>0);
+ String tableType = rs.getString("TABLE_TYPE");
+ if (resultTableName.endsWith("view")) {
+ assertEquals("Expected a tabletype view but got something else.", "VIRTUAL_VIEW", tableType);
+ }
+ cnt++;
+ }
+ rs.close();
+ assertEquals("Received an incorrect number of tables.", tests.get(checkPattern).length, cnt);
+ }
+
+ // only ask for the views.
+ ResultSet rs = (ResultSet)con.getMetaData().getTables("default", null, null
+ , new String[]{"VIRTUAL_VIEW"});
+ int cnt=0;
+ while (rs.next()) {
+ cnt++;
+ }
+ rs.close();
+ assertEquals("Incorrect number of views found.", 1, cnt);
+ }
+
+ public void testMetaDataGetCatalogs() throws SQLException {
+ ResultSet rs = (ResultSet)con.getMetaData().getCatalogs();
+ ResultSetMetaData resMeta = rs.getMetaData();
+ assertEquals(1, resMeta.getColumnCount());
+ assertEquals("TABLE_CAT", resMeta.getColumnName(1));
+
+ assertFalse(rs.next());
+ }
+
+ public void testMetaDataGetSchemas() throws SQLException {
+ ResultSet rs = (ResultSet)con.getMetaData().getSchemas();
+ ResultSetMetaData resMeta = rs.getMetaData();
+ assertEquals(2, resMeta.getColumnCount());
+ assertEquals("TABLE_SCHEMA", resMeta.getColumnName(1));
+ assertEquals("TABLE_CATALOG", resMeta.getColumnName(2));
+
+ assertTrue(rs.next());
+ assertEquals("default", rs.getString(1));
+// assertNull(rs.getString(2));
+
+ assertFalse(rs.next());
+ rs.close();
+ }
+
+ public void testMetaDataGetTableTypes() throws SQLException {
+ ResultSet rs = (ResultSet)con.getMetaData().getTableTypes();
+
+ Set<String> tabletypes = new HashSet<String>();
+ tabletypes.add("MANAGED_TABLE");
+ tabletypes.add("EXTERNAL_TABLE");
+ tabletypes.add("VIRTUAL_VIEW");
+ tabletypes.add("INDEX_TABLE");
+
+ int cnt = 0;
+ while (rs.next()) {
+ String tabletype = rs.getString("TABLE_TYPE");
+ assertEquals("Get by index different from get by name", rs.getString(1), tabletype);
+ tabletypes.remove(tabletype);
+ cnt++;
+ }
+ rs.close();
+ assertEquals("Incorrect tabletype count.", 0, tabletypes.size());
+ assertTrue("Found less tabletypes then we test for.", cnt >= tabletypes.size());
+ }
+
+ public void testMetaDataGetColumns() throws SQLException {
+ Map<String[], Integer> tests = new HashMap<String[], Integer>();
+ tests.put(new String[]{"testhivejdbcdriver\\_table", null}, 2);
+ tests.put(new String[]{"testhivejdbc%", null}, 7);
+ tests.put(new String[]{"testhiveJDBC%", null}, 7);
+ tests.put(new String[]{"%jdbcdriver\\_table", null}, 2);
+ tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_col"}, 1);
+// tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_COL"}, 1);
+ tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_co_"}, 1);
+ tests.put(new String[]{"%jdbcdriver\\_table%", "under_col"}, 1);
+ tests.put(new String[]{"%jdbcdriver\\_table%", "und%"}, 1);
+ tests.put(new String[]{"%jdbcdriver\\_table%", "%"}, 2);
+ tests.put(new String[]{"%jdbcdriver\\_table%", "_%"}, 2);
+
+ for (String[] checkPattern: tests.keySet()) {
+ ResultSet rs = con.getMetaData().getColumns(null, null, checkPattern[0],
+ checkPattern[1]);
+
+ // validate the metadata for the getColumns result set
+ ResultSetMetaData rsmd = rs.getMetaData();
+ assertEquals("TABLE_CAT", rsmd.getColumnName(1));
+
+ int cnt = 0;
+ while (rs.next()) {
+ String columnname = rs.getString("COLUMN_NAME");
+ int ordinalPos = rs.getInt("ORDINAL_POSITION");
+ switch(cnt) {
+ case 0:
+ assertEquals("Wrong column name found", "under_col", columnname);
+ assertEquals("Wrong ordinal position found", ordinalPos, 1);
+ break;
+ case 1:
+ assertEquals("Wrong column name found", "value", columnname);
+ assertEquals("Wrong ordinal position found", ordinalPos, 2);
+ break;
+ default:
+ break;
+ }
+ cnt++;
+ }
+ rs.close();
+ assertEquals("Found less columns then we test for.", tests.get(checkPattern).intValue(), cnt);
+ }
+ }
+
+ /**
+ * Validate the Metadata for the result set of a metadata getColumns call.
+ */
+ public void testMetaDataGetColumnsMetaData() throws SQLException {
+ ResultSet rs = (ResultSet)con.getMetaData().getColumns(null, null
+ , "testhivejdbcdriver\\_table", null);
+
+ ResultSetMetaData rsmd = rs.getMetaData();
+
+ assertEquals("TABLE_CAT", rsmd.getColumnName(1));
+ assertEquals(Types.VARCHAR, rsmd.getColumnType(1));
+ assertEquals(Integer.MAX_VALUE, rsmd.getColumnDisplaySize(1));
+
+ assertEquals("ORDINAL_POSITION", rsmd.getColumnName(17));
+ assertEquals(Types.INTEGER, rsmd.getColumnType(17));
+ assertEquals(11, rsmd.getColumnDisplaySize(17));
+ }
+
+ /*
+ public void testConversionsBaseResultSet() throws SQLException {
+ ResultSet rs = new HiveMetaDataResultSet(Arrays.asList("key")
+ , Arrays.asList("long")
+ , Arrays.asList(1234, "1234", "abc")) {
+ private int cnt=1;
+ public boolean next() throws SQLException {
+ if (cnt", colRS.getString("TYPE_NAME").toLowerCase());
+
+ assertTrue(colRS.next());
+
+ assertEquals("c6", meta.getColumnName(6));
+ assertEquals(Types.VARCHAR, meta.getColumnType(6));
+ assertEquals("string", meta.getColumnTypeName(6));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(6));
+ assertEquals(Integer.MAX_VALUE, meta.getPrecision(6));
+ assertEquals(0, meta.getScale(6));
+
+ assertEquals("c6", colRS.getString("COLUMN_NAME"));
+ assertEquals(Types.VARCHAR, colRS.getInt("DATA_TYPE"));
+ assertEquals("map", colRS.getString("TYPE_NAME").toLowerCase());
+
+ assertTrue(colRS.next());
+
+ assertEquals("c7", meta.getColumnName(7));
+ assertEquals(Types.VARCHAR, meta.getColumnType(7));
+ assertEquals("string", meta.getColumnTypeName(7));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(7));
+ assertEquals(Integer.MAX_VALUE, meta.getPrecision(7));
+ assertEquals(0, meta.getScale(7));
+
+ assertEquals("c7", colRS.getString("COLUMN_NAME"));
+ assertEquals(Types.VARCHAR, colRS.getInt("DATA_TYPE"));
+ assertEquals("map", colRS.getString("TYPE_NAME").toLowerCase());
+
+ assertTrue(colRS.next());
+
+ assertEquals("c8", meta.getColumnName(8));
+ assertEquals(Types.VARCHAR, meta.getColumnType(8));
+ assertEquals("string", meta.getColumnTypeName(8));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(8));
+ assertEquals(Integer.MAX_VALUE, meta.getPrecision(8));
+ assertEquals(0, meta.getScale(8));
+
+ assertEquals("c8", colRS.getString("COLUMN_NAME"));
+ assertEquals(Types.VARCHAR, colRS.getInt("DATA_TYPE"));
+ assertEquals("struct", colRS.getString("TYPE_NAME").toLowerCase());
+
+ assertTrue(colRS.next());
+
+ assertEquals("c9", meta.getColumnName(9));
+ assertEquals(Types.TINYINT, meta.getColumnType(9));
+ assertEquals("tinyint", meta.getColumnTypeName(9));
+ assertEquals(4, meta.getColumnDisplaySize(9));
+ assertEquals(3, meta.getPrecision(9));
+ assertEquals(0, meta.getScale(9));
+
+ assertEquals("c9", colRS.getString("COLUMN_NAME"));
+ assertEquals(Types.TINYINT, colRS.getInt("DATA_TYPE"));
+ assertEquals("tinyint", colRS.getString("TYPE_NAME").toLowerCase());
+ assertEquals(meta.getPrecision(9), colRS.getInt("COLUMN_SIZE"));
+ assertEquals(meta.getScale(9), colRS.getInt("DECIMAL_DIGITS"));
+
+ assertTrue(colRS.next());
+
+ assertEquals("c10", meta.getColumnName(10));
+ assertEquals(Types.SMALLINT, meta.getColumnType(10));
+ assertEquals("smallint", meta.getColumnTypeName(10));
+ assertEquals(6, meta.getColumnDisplaySize(10));
+ assertEquals(5, meta.getPrecision(10));
+ assertEquals(0, meta.getScale(10));
+
+ assertEquals("c10", colRS.getString("COLUMN_NAME"));
+ assertEquals(Types.SMALLINT, colRS.getInt("DATA_TYPE"));
+ assertEquals("smallint", colRS.getString("TYPE_NAME").toLowerCase());
+ assertEquals(meta.getPrecision(10), colRS.getInt("COLUMN_SIZE"));
+ assertEquals(meta.getScale(10), colRS.getInt("DECIMAL_DIGITS"));
+
+ assertTrue(colRS.next());
+
+ assertEquals("c11", meta.getColumnName(11));
+ assertEquals(Types.FLOAT, meta.getColumnType(11));
+ assertEquals("float", meta.getColumnTypeName(11));
+ assertEquals(24, meta.getColumnDisplaySize(11));
+ assertEquals(7, meta.getPrecision(11));
+ assertEquals(7, meta.getScale(11));
+
+ assertEquals("c11", colRS.getString("COLUMN_NAME"));
+ assertEquals(Types.FLOAT, colRS.getInt("DATA_TYPE"));
+ assertEquals("float", colRS.getString("TYPE_NAME").toLowerCase());
+ assertEquals(meta.getPrecision(11), colRS.getInt("COLUMN_SIZE"));
+ assertEquals(meta.getScale(11), colRS.getInt("DECIMAL_DIGITS"));
+
+ assertTrue(colRS.next());
+
+ assertEquals("c12", meta.getColumnName(12));
+ assertEquals(Types.BIGINT, meta.getColumnType(12));
+ assertEquals("bigint", meta.getColumnTypeName(12));
+ assertEquals(20, meta.getColumnDisplaySize(12));
+ assertEquals(19, meta.getPrecision(12));
+ assertEquals(0, meta.getScale(12));
+
+ assertEquals("c12", colRS.getString("COLUMN_NAME"));
+ assertEquals(Types.BIGINT, colRS.getInt("DATA_TYPE"));
+ assertEquals("bigint", colRS.getString("TYPE_NAME").toLowerCase());
+ assertEquals(meta.getPrecision(12), colRS.getInt("COLUMN_SIZE"));
+ assertEquals(meta.getScale(12), colRS.getInt("DECIMAL_DIGITS"));
+
+ assertEquals("_c12", meta.getColumnName(13));
+ assertEquals(Types.INTEGER, meta.getColumnType(13));
+ assertEquals("int", meta.getColumnTypeName(13));
+ assertEquals(11, meta.getColumnDisplaySize(13));
+ assertEquals(10, meta.getPrecision(13));
+ assertEquals(0, meta.getScale(13));
+
+ assertEquals("b", meta.getColumnName(14));
+ assertEquals(Types.VARCHAR, meta.getColumnType(14));
+ assertEquals("string", meta.getColumnTypeName(14));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(14));
+ assertEquals(Integer.MAX_VALUE, meta.getPrecision(14));
+ assertEquals(0, meta.getScale(14));
+
+ assertEquals("c17", meta.getColumnName(15));
+ assertEquals(Types.TIMESTAMP, meta.getColumnType(15));
+ assertEquals("timestamp", meta.getColumnTypeName(15));
+ assertEquals(29, meta.getColumnDisplaySize(15));
+ assertEquals(29, meta.getPrecision(15));
+ assertEquals(9, meta.getScale(15));
+
+ assertEquals("c18", meta.getColumnName(16));
+ assertEquals(Types.DECIMAL, meta.getColumnType(16));
+ assertEquals("decimal", meta.getColumnTypeName(16));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(16));
+ assertEquals(Integer.MAX_VALUE, meta.getPrecision(16));
+ assertEquals(Integer.MAX_VALUE, meta.getScale(16));
+
+ for (int i = 1; i <= meta.getColumnCount(); i++) {
+ assertFalse(meta.isAutoIncrement(i));
+ assertFalse(meta.isCurrency(i));
+ assertEquals(ResultSetMetaData.columnNullable, meta.isNullable(i));
+ }
+ }
+
+ // [url] [host] [port] [db]
+ private static final String[][] URL_PROPERTIES = new String[][] {
+ {"jdbc:hive2://", "", "", "default"},
+ {"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
+ {"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
+ {"jdbc:hive2://foo:1243", "foo", "1243", "default"}};
+
+ public void testDriverProperties() throws SQLException {
+ HiveDriver driver = new HiveDriver();
+
+ for (String[] testValues : URL_PROPERTIES) {
+ DriverPropertyInfo[] dpi = driver.getPropertyInfo(testValues[0], null);
+ assertEquals("unexpected DriverPropertyInfo array size", 3, dpi.length);
+ assertDpi(dpi[0], "HOST", testValues[1]);
+ assertDpi(dpi[1], "PORT", testValues[2]);
+ assertDpi(dpi[2], "DBNAME", testValues[3]);
+ }
+
+ }
+
+ private static void assertDpi(DriverPropertyInfo dpi, String name,
+ String value) {
+ assertEquals("Invalid DriverPropertyInfo name", name, dpi.name);
+ assertEquals("Invalid DriverPropertyInfo value", value, dpi.value);
+ assertEquals("Invalid DriverPropertyInfo required", false, dpi.required);
+ }
+
+
+ /**
+ * validate schema generated by "set" command
+ * @throws SQLException
+ */
+ public void testSetCommand() throws SQLException {
+ // execute set command
+ String sql = "set -v";
+ Statement stmt = con.createStatement();
+ ResultSet res = stmt.executeQuery(sql);
+
+ // Validate resultset columns
+ ResultSetMetaData md = res.getMetaData();
+ assertEquals(1, md.getColumnCount());
+ assertEquals(SET_COLUMN_NAME, md.getColumnLabel(1));
+
+ //check if there is data in the resultset
+ assertTrue("Nothing returned by set -v", res.next());
+
+ res.close();
+ stmt.close();
+ }
+
+ /**
+ * Validate error on closed resultset
+ * @throws SQLException
+ */
+ public void testPostClose() throws SQLException {
+ Statement stmt = con.createStatement();
+ ResultSet res = stmt.executeQuery("select * from " + tableName);
+ assertNotNull("ResultSet is null", res);
+ res.close();
+ try { res.getInt(1); fail("Expected SQLException"); }
+ catch (SQLException e) { }
+ try { res.getMetaData(); fail("Expected SQLException"); }
+ catch (SQLException e) { }
+ try { res.setFetchSize(10); fail("Expected SQLException"); }
+ catch (SQLException e) { }
+ }
+
+ /*
+ * The JDBC spec says when you have duplicate column names,
+ * the first one should be returned.
+ */
+ public void testDuplicateColumnNameOrder() throws SQLException {
+ Statement stmt = con.createStatement();
+ ResultSet rs = stmt.executeQuery("SELECT 1 AS a, 2 AS a from " + tableName);
+ assertTrue(rs.next());
+ assertEquals(1, rs.getInt("a"));
+ }
+
+
+ /**
+ * Test bad args to getXXX()
+ * @throws SQLException
+ */
+ public void testOutOfBoundCols() throws SQLException {
+ Statement stmt = con.createStatement();
+
+ ResultSet res = stmt.executeQuery(
+ "select * from " + tableName);
+
+ // row 1
+ assertTrue(res.next());
+
+ try {
+ res.getInt(200);
+ fail("Expected SQLException for out-of-range column index");
+ } catch (SQLException e) {
+ }
+
+ try {
+ res.getInt("zzzz");
+ fail("Expected SQLException for unknown column name");
+ } catch (SQLException e) {
+ }
+
+ }
+
+ /**
+ * Verify selecting using builtin UDFs
+ * @throws SQLException
+ */
+ public void testBuiltInUDFCol() throws SQLException {
+ Statement stmt = con.createStatement();
+ ResultSet res = stmt.executeQuery("select c12, bin(c12) from " + dataTypeTableName
+ + " where c1=1");
+ ResultSetMetaData md = res.getMetaData();
+ assertEquals(2, md.getColumnCount()); // two result columns
+ assertEquals("_c1", md.getColumnLabel(2)); // verify the system generated column name
+ assertTrue(res.next());
+ assertEquals(1, res.getLong(1));
+ assertEquals("1", res.getString(2));
+ res.close();
+ }
+
+ /**
+ * Verify selecting named expression columns
+ * @throws SQLException
+ */
+ public void testExprCol() throws SQLException {
+ Statement stmt = con.createStatement();
+ ResultSet res = stmt.executeQuery("select c1+1 as col1, length(c4) as len from " + dataTypeTableName
+ + " where c1=1");
+ ResultSetMetaData md = res.getMetaData();
+ assertEquals(2, md.getColumnCount()); // two result columns
+ assertEquals("col1", md.getColumnLabel(1)); // verify the column name
+ assertEquals("len", md.getColumnLabel(2)); // verify the column name
+ assertTrue(res.next());
+ assertEquals(2, res.getInt(1));
+ assertEquals(1, res.getInt(2));
+ res.close();
+ }
+
+ /**
+ * test getProcedureColumns()
+ * @throws SQLException
+ */
+ public void testProcCols() throws SQLException {
+ DatabaseMetaData dbmd = con.getMetaData();
+ assertNotNull(dbmd);
+ // currently getProcedureColumns always returns an empty resultset for Hive
+ ResultSet res = dbmd.getProcedureColumns(null, null, null, null);
+ ResultSetMetaData md = res.getMetaData();
+ assertEquals(md.getColumnCount(), 20);
+ assertFalse(res.next());
+ }
+
+ /**
+ * test getProcedures()
+ * @throws SQLException
+ */
+ public void testProcedures() throws SQLException {
+ DatabaseMetaData dbmd = con.getMetaData();
+ assertNotNull(dbmd);
+ // currently getProcedures always returns an empty resultset for Hive
+ ResultSet res = dbmd.getProcedures(null, null, null);
+ ResultSetMetaData md = res.getMetaData();
+ assertEquals(md.getColumnCount(), 9);
+ assertFalse(res.next());
+ }
+
+ /**
+ * test getPrimaryKeys()
+ * @throws SQLException
+ */
+ public void testPrimaryKeys() throws SQLException {
+ DatabaseMetaData dbmd = con.getMetaData();
+ assertNotNull(dbmd);
+ // currently getPrimaryKeys always returns an empty resultset for Hive
+ ResultSet res = dbmd.getPrimaryKeys(null, null, null);
+ ResultSetMetaData md = res.getMetaData();
+ assertEquals(md.getColumnCount(), 6);
+ assertFalse(res.next());
+ }
+
+ /**
+ * test getImportedKeys()
+ * @throws SQLException
+ */
+ public void testImportedKeys() throws SQLException {
+ DatabaseMetaData dbmd = con.getMetaData();
+ assertNotNull(dbmd);
+ // currently getImportedKeys always returns an empty resultset for Hive
+ ResultSet res = dbmd.getImportedKeys(null, null, null);
+ ResultSetMetaData md = res.getMetaData();
+ assertEquals(md.getColumnCount(), 14);
+ assertFalse(res.next());
+ }
+
+}
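For context, the tests above assume a shared Connection con created in the test fixture. A minimal sketch of how such a connection is obtained; the driver class name and URL scheme are taken from testDriverProperties, and a HiveServer2 instance on localhost is an assumption:

  import java.sql.Connection;
  import java.sql.DriverManager;

  public class ConnectionSketch {
    public static void main(String[] args) throws Exception {
      // driver class name assumed from this patch's org.apache.hive.jdbc package
      Class.forName("org.apache.hive.jdbc.HiveDriver");
      Connection con = DriverManager.getConnection(
          "jdbc:hive2://localhost:10000/default", "", "");
      System.out.println("connected: " + !con.isClosed());
      con.close();
    }
  }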
diff --git metastore/build.xml metastore/build.xml
index 9e60b66..0e94611 100755
--- metastore/build.xml
+++ metastore/build.xml
@@ -22,15 +22,6 @@
-
-
- You must set the 'thrift.home' property!
- Executing ${thrift.home}/bin/thrift on ${ant.project.name}/if/hive_metastore.thrift
-
-
-
-
-
diff --git ql/.gitignore ql/.gitignore
new file mode 100644
index 0000000..d41d7f4
--- /dev/null
+++ ql/.gitignore
@@ -0,0 +1 @@
+TempStatsStore
diff --git ql/build.xml ql/build.xml
index 649d365..09600ff 100644
--- ql/build.xml
+++ ql/build.xml
@@ -30,26 +30,22 @@
+
+
-
-
- You must set the 'thrift.home' property!
- Executing ${thrift.home}/bin/thrift on ${ant.project.name}/if/queryplan.thrift
-
-
-
-
-
+
+
-
-
-
+ hadoopVersion="${hadoopVersion}"/>
+
+
-
-
-
+
-
-
- You must set the 'thrift.home' property!
- Executing ${thrift.home}/bin/thrift to build java serde Constants...
-
-
-
- Executing ${thrift.home}/bin/thrift to build complex.thrift test classes...
-
-
-
- Executing ${thrift.home}/bin/thrift to build testthrift.thrift classes...
-
-
-
- Executing ${thrift.home}/bin/thrift to build megastruct.thrift classes...
-
-
-
-
-
Generating data/files/complex.seq...
diff --git service/build.xml service/build.xml
index 19bdb9f..cb0beb4 100644
--- service/build.xml
+++ service/build.xml
@@ -22,15 +22,6 @@
-
-
- You must set the 'thrift.home' property!
- Executing ${thrift.home}/bin/thrift on ${ant.project.name}/if/hive_service.thrift
-
-
-
-
-
+const set<TTypeId> PRIMITIVE_TYPES = [
+ TTypeId.BOOLEAN_TYPE
+ TTypeId.TINYINT_TYPE
+ TTypeId.SMALLINT_TYPE
+ TTypeId.INT_TYPE
+ TTypeId.BIGINT_TYPE
+ TTypeId.FLOAT_TYPE
+ TTypeId.DOUBLE_TYPE
+ TTypeId.STRING_TYPE
+ TTypeId.TIMESTAMP_TYPE
+ TTypeId.BINARY_TYPE,
+ TTypeId.DECIMAL_TYPE
+]
+
+const set<TTypeId> COMPLEX_TYPES = [
+ TTypeId.ARRAY_TYPE
+ TTypeId.MAP_TYPE
+ TTypeId.STRUCT_TYPE
+ TTypeId.UNION_TYPE
+ TTypeId.USER_DEFINED_TYPE
+]
+
+const set<TTypeId> COLLECTION_TYPES = [
+ TTypeId.ARRAY_TYPE
+ TTypeId.MAP_TYPE
+]
+
+const map<TTypeId,string> TYPE_NAMES = {
+ TTypeId.BOOLEAN_TYPE: "BOOLEAN",
+ TTypeId.TINYINT_TYPE: "TINYINT",
+ TTypeId.SMALLINT_TYPE: "SMALLINT",
+ TTypeId.INT_TYPE: "INT",
+ TTypeId.BIGINT_TYPE: "BIGINT",
+ TTypeId.FLOAT_TYPE: "FLOAT",
+ TTypeId.DOUBLE_TYPE: "DOUBLE",
+ TTypeId.STRING_TYPE: "STRING",
+ TTypeId.TIMESTAMP_TYPE: "TIMESTAMP",
+ TTypeId.BINARY_TYPE: "BINARY",
+ TTypeId.ARRAY_TYPE: "ARRAY",
+ TTypeId.MAP_TYPE: "MAP",
+ TTypeId.STRUCT_TYPE: "STRUCT",
+ TTypeId.UNION_TYPE: "UNIONTYPE"
+ TTypeId.DECIMAL_TYPE: "DECIMAL"
+}
+
+// Thrift does not support recursively defined types or forward declarations,
+// which makes it difficult to represent Hive's nested types.
+// To get around these limitations TTypeDesc employs a type list that maps
+// integer "pointers" to TTypeEntry objects. The following examples show
+// how different types are represented using this scheme:
+//
+// "INT":
+// TTypeDesc {
+// types = [
+// TTypeEntry.primitive_entry {
+// type = INT_TYPE
+// }
+// ]
+// }
+//
+// "ARRAY":
+// TTypeDesc {
+// types = [
+// TTypeEntry.array_entry {
+// object_type_ptr = 1
+// },
+// TTypeEntry.primitive_entry {
+// type = INT_TYPE
+// }
+// ]
+// }
+//
+// "MAP":
+// TTypeDesc {
+// types = [
+// TTypeEntry.map_entry {
+// key_type_ptr = 1
+// value_type_ptr = 2
+// },
+// TTypeEntry.primitive_entry {
+// type = INT_TYPE
+// },
+// TTypeEntry.primitive_entry {
+// type = STRING_TYPE
+// }
+// ]
+// }
+
+typedef i32 TTypeEntryPtr
+
+// Type entry for a primitive type.
+struct TPrimitiveTypeEntry {
+ // The primitive type token. This must satisfy the condition
+ // that type is in the PRIMITIVE_TYPES set.
+ 1: required TTypeId type
+}
+
+// Type entry for an ARRAY type.
+struct TArrayTypeEntry {
+ 1: required TTypeEntryPtr objectTypePtr
+}
+
+// Type entry for a MAP type.
+struct TMapTypeEntry {
+ 1: required TTypeEntryPtr keyTypePtr
+ 2: required TTypeEntryPtr valueTypePtr
+}
+
+// Type entry for a STRUCT type.
+struct TStructTypeEntry {
+ 1: required map<string, TTypeEntryPtr> nameToTypePtr
+}
+
+// Type entry for a UNIONTYPE type.
+struct TUnionTypeEntry {
+ 1: required map<string, TTypeEntryPtr> nameToTypePtr
+}
+
+struct TUserDefinedTypeEntry {
+ // The fully qualified name of the class implementing this type.
+ 1: required string typeClassName
+}
+
+// We use a union here since Thrift does not support inheritance.
+union TTypeEntry {
+ 1: TPrimitiveTypeEntry primitiveEntry
+ 2: TArrayTypeEntry arrayEntry
+ 3: TMapTypeEntry mapEntry
+ 4: TStructTypeEntry structEntry
+ 5: TUnionTypeEntry unionEntry
+ 6: TUserDefinedTypeEntry userDefinedTypeEntry
+}
+
+// Type descriptor for columns.
+struct TTypeDesc {
+ // The "top" type is always the first element of the list.
+ // If the top type is an ARRAY, MAP, STRUCT, or UNIONTYPE
+ // type, then subsequent elements represent nested types.
+ 1: required list<TTypeEntry> types
+}
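The pointer-list scheme above is easiest to see through the generated Java bindings. A minimal sketch (mine, not part of the patch) that builds the TTypeDesc for "ARRAY<INT>" exactly as in the second example; it assumes the standard Thrift-generated constructors and union factory methods (TTypeEntry.arrayEntry, TTypeEntry.primitiveEntry), with generated-class imports omitted since they depend on the chosen namespace:

  import java.util.Arrays;

  public class TypeDescSketch {
    public static void main(String[] args) {
      TTypeDesc desc = new TTypeDesc();
      desc.setTypes(Arrays.asList(
          // types[0]: the "top" ARRAY type; its element type lives at index 1
          TTypeEntry.arrayEntry(new TArrayTypeEntry(1)),
          // types[1]: the element type, INT
          TTypeEntry.primitiveEntry(new TPrimitiveTypeEntry(TTypeId.INT_TYPE))));
      System.out.println(desc);
    }
  }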
+
+// A result set column descriptor.
+struct TColumnDesc {
+ // The name of the column
+ 1: required string columnName
+
+ // The type descriptor for this column
+ 2: required TTypeDesc typeDesc
+
+ // The ordinal position of this column in the schema
+ 3: required i32 position
+
+ 4: optional string comment
+}
+
+// Metadata used to describe the schema (column names, types, comments)
+// of result sets.
+struct TTableSchema {
+ 1: required list<TColumnDesc> columns
+}
+
+// A Boolean column value.
+struct TBoolValue {
+ // NULL if value is unset.
+ 1: optional bool value
+}
+
+// A Byte column value.
+struct TByteValue {
+ // NULL if value is unset.
+ 1: optional byte value
+}
+
+// A signed, 16 bit column value.
+struct TI16Value {
+ // NULL if value is unset
+ 1: optional i16 value
+}
+
+// A signed, 32 bit column value
+struct TI32Value {
+ // NULL if value is unset
+ 1: optional i32 value
+}
+
+// A signed 64 bit column value
+struct TI64Value {
+ // NULL if value is unset
+ 1: optional i64 value
+}
+
+// A floating point 64 bit column value
+struct TDoubleValue {
+ // NULL if value is unset
+ 1: optional double value
+}
+
+struct TStringValue {
+ // NULL if value is unset
+ 1: optional string value
+}
+
+union TColumn {
+ 1: list<TBoolValue> boolColumn
+ 2: list<TByteValue> byteColumn
+ 3: list<TI16Value> i16Column
+ 4: list<TI32Value> i32Column
+ 5: list<TI64Value> i64Column
+ 6: list<TDoubleValue> doubleColumn
+ 7: list<TStringValue> stringColumn
+}
+
+// A single column value in a result set.
+// Note that Hive's type system is richer than Thrift's,
+// so in some cases we have to map multiple Hive types
+// to the same Thrift type. On the client-side this is
+// disambiguated by looking at the Schema of the
+// result set.
+union TColumnValue {
+ 1: TBoolValue boolVal // BOOLEAN
+ 2: TByteValue byteVal // TINYINT
+ 3: TI16Value i16Val // SMALLINT
+ 4: TI32Value i32Val // INT
+ 5: TI64Value i64Val // BIGINT, TIMESTAMP
+ 6: TDoubleValue doubleVal // FLOAT, DOUBLE
+ 7: TStringValue stringVal // STRING, LIST, MAP, STRUCT, UNIONTYPE, BINARY, DECIMAL
+}
+
+// Represents a row in a rowset.
+struct TRow {
+ 1: required list<TColumnValue> colVals
+}
+
+// Represents a rowset
+struct TRowSet {
+ // The starting row offset of this rowset.
+ 1: required i64 startRowOffset
+ 2: required list<TRow> rows
+ 3: optional list<TColumn> columns
+}
+
+// The return status code contained in each response.
+enum TStatusCode {
+ SUCCESS_STATUS,
+ SUCCESS_WITH_INFO_STATUS,
+ STILL_EXECUTING_STATUS,
+ ERROR_STATUS,
+ INVALID_HANDLE_STATUS
+}
+
+// The return status of a remote request
+struct TStatus {
+ 1: required TStatusCode statusCode
+
+ // If status is SUCCESS_WITH_INFO, info_msgs may be populated with
+ // additional diagnostic information.
+ 2: optional list<string> infoMessages
+
+ // If status is ERROR, then the following fields may be set
+ 3: optional string sqlState // as defined in the ISO/IEC CLI specification
+ 4: optional i32 errorCode // internal error code
+ 5: optional string errorMessage
+}
+
+// The state of an operation (i.e. a query or other
+// asynchronous operation that generates a result set)
+// on the server.
+enum TOperationState {
+ // The operation has been initialized
+ INITIALIZED_STATE,
+
+ // The operation is running. In this state the result
+ // set is not available.
+ RUNNING_STATE,
+
+ // The operation has completed. When an operation is in
+ // this state its result set may be fetched.
+ FINISHED_STATE,
+
+ // The operation was canceled by a client
+ CANCELED_STATE,
+
+ // The operation was closed by a client
+ CLOSED_STATE,
+
+ // The operation failed due to an error
+ ERROR_STATE,
+
+ // The operation is in an unrecognized state
+ UNKNOWN_STATE,
+}
+
+
+// A string identifier. This is interpreted literally.
+typedef string TIdentifier
+
+// A search pattern.
+//
+// Valid search pattern characters:
+// '_': Any single character.
+// '%': Any sequence of zero or more characters.
+// '\': Escape character used to include special characters,
+// e.g. '_', '%', '\'. If a '\' precedes a non-special
+// character it has no special meaning and is interpreted
+// literally.
+typedef string TPattern
+
+
+// A search pattern or identifier. Used as input
+// parameter for many of the catalog functions.
+typedef string TPatternOrIdentifier
+
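A few illustrative matches for these pattern rules (examples mine, not part of the interface definition):

  // "test\_table"  matches only the name "test_table" (the '_' is escaped)
  // "test_table"   also matches e.g. "testXtable" ('_' is a single-character wildcard)
  // "test%"        matches any name that starts with "test"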
+struct THandleIdentifier {
+ // 16 byte globally unique identifier
+ // This is the public ID of the handle and
+ // can be used for reporting.
+ 1: required binary guid,
+
+ // 16 byte secret generated by the server
+ // and used to verify that the handle is not
+ // being hijacked by another user.
+ 2: required binary secret,
+}
+
+// Client-side handle to persistent
+// session information on the server-side.
+struct TSessionHandle {
+ 1: required THandleIdentifier sessionId
+}
+
+// The subtype of an OperationHandle.
+enum TOperationType {
+ EXECUTE_STATEMENT,
+ GET_TYPE_INFO,
+ GET_CATALOGS,
+ GET_SCHEMAS,
+ GET_TABLES,
+ GET_TABLE_TYPES,
+ GET_COLUMNS,
+ GET_FUNCTIONS,
+ UNKNOWN,
+}
+
+// Client-side reference to a task running
+// asynchronously on the server.
+struct TOperationHandle {
+ 1: required THandleIdentifier operationId
+ 2: required TOperationType operationType
+
+ // If hasResultSet = TRUE, then this operation
+ // generates a result set that can be fetched.
+ // Note that the result set may be empty.
+ //
+ // If hasResultSet = FALSE, then this operation
+ // does not generate a result set, and calling
+ // GetResultSetMetadata or FetchResults against
+ // this OperationHandle will generate an error.
+ 3: required bool hasResultSet
+
+ // For operations that don't generate result sets,
+ // modifiedRowCount is either:
+ //
+ // 1) The number of rows that were modified by
+ // the DML operation (e.g. number of rows inserted,
+ // number of rows deleted, etc).
+ //
+ // 2) 0 for operations that don't modify or add rows.
+ //
+ // 3) < 0 if the operation is capable of modifying rows,
+ // but Hive is unable to determine how many rows were
+ // modified. For example, Hive's LOAD DATA command
+ // doesn't generate row count information because
+ // Hive doesn't inspect the data as it is loaded.
+ //
+ // modifiedRowCount is unset if the operation generates
+ // a result set.
+ 4: optional double modifiedRowCount
+}
+
+
+// OpenSession()
+//
+// Open a session (connection) on the server against
+// which operations may be executed.
+struct TOpenSessionReq {
+ // The version of the HiveServer2 protocol that the client is using.
+ 1: required TProtocolVersion client_protocol = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1
+
+ // Username and password for authentication.
+ // Depending on the authentication scheme being used,
+ // this information may instead be provided by a lower
+ // protocol layer, in which case these fields may be
+ // left unset.
+ 2: optional string username
+ 3: optional string password
+
+ // Configuration overlay which is applied when the session is
+ // first created.
+ 4: optional map<string, string> configuration
+}
+
+struct TOpenSessionResp {
+ 1: required TStatus status
+
+ // The protocol version that the server is using.
+ 2: required TProtocolVersion serverProtocolVersion = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1
+
+ // Session Handle
+ 3: optional TSessionHandle sessionHandle
+
+ // The configuration settings for this session.
+ 4: optional map<string, string> configuration
+}
+
+
+// CloseSession()
+//
+// Closes the specified session and frees any resources
+// currently allocated to that session. Any open
+// operations in that session will be canceled.
+struct TCloseSessionReq {
+ 1: required TSessionHandle sessionHandle
+}
+
+struct TCloseSessionResp {
+ 1: required TStatus status
+}
+
+
+
+enum TGetInfoType {
+ CLI_MAX_DRIVER_CONNECTIONS = 0,
+ CLI_MAX_CONCURRENT_ACTIVITIES = 1,
+ CLI_DATA_SOURCE_NAME = 2,
+ CLI_FETCH_DIRECTION = 8,
+ CLI_SERVER_NAME = 13,
+ CLI_SEARCH_PATTERN_ESCAPE = 14,
+ CLI_DBMS_NAME = 17,
+ CLI_DBMS_VER = 18,
+ CLI_ACCESSIBLE_TABLES = 19,
+ CLI_ACCESSIBLE_PROCEDURES = 20,
+ CLI_CURSOR_COMMIT_BEHAVIOR = 23,
+ CLI_DATA_SOURCE_READ_ONLY = 25,
+ CLI_DEFAULT_TXN_ISOLATION = 26,
+ CLI_IDENTIFIER_CASE = 28,
+ CLI_IDENTIFIER_QUOTE_CHAR = 29,
+ CLI_MAX_COLUMN_NAME_LEN = 30,
+ CLI_MAX_CURSOR_NAME_LEN = 31,
+ CLI_MAX_SCHEMA_NAME_LEN = 32,
+ CLI_MAX_CATALOG_NAME_LEN = 34,
+ CLI_MAX_TABLE_NAME_LEN = 35,
+ CLI_SCROLL_CONCURRENCY = 43,
+ CLI_TXN_CAPABLE = 46,
+ CLI_USER_NAME = 47,
+ CLI_TXN_ISOLATION_OPTION = 72,
+ CLI_INTEGRITY = 73,
+ CLI_GETDATA_EXTENSIONS = 81,
+ CLI_NULL_COLLATION = 85,
+ CLI_ALTER_TABLE = 86,
+ CLI_ORDER_BY_COLUMNS_IN_SELECT = 90,
+ CLI_SPECIAL_CHARACTERS = 94,
+ CLI_MAX_COLUMNS_IN_GROUP_BY = 97,
+ CLI_MAX_COLUMNS_IN_INDEX = 98,
+ CLI_MAX_COLUMNS_IN_ORDER_BY = 99,
+ CLI_MAX_COLUMNS_IN_SELECT = 100,
+ CLI_MAX_COLUMNS_IN_TABLE = 101,
+ CLI_MAX_INDEX_SIZE = 102,
+ CLI_MAX_ROW_SIZE = 104,
+ CLI_MAX_STATEMENT_LEN = 105,
+ CLI_MAX_TABLES_IN_SELECT = 106,
+ CLI_MAX_USER_NAME_LEN = 107,
+ CLI_OJ_CAPABILITIES = 115,
+
+ CLI_XOPEN_CLI_YEAR = 10000,
+ CLI_CURSOR_SENSITIVITY = 10001,
+ CLI_DESCRIBE_PARAMETER = 10002,
+ CLI_CATALOG_NAME = 10003,
+ CLI_COLLATION_SEQ = 10004,
+ CLI_MAX_IDENTIFIER_LEN = 10005,
+}
+
+union TGetInfoValue {
+ 1: string stringValue
+ 2: i16 smallIntValue
+ 3: i32 integerBitmask
+ 4: i32 integerFlag
+ 5: i32 binaryValue
+ 6: i64 lenValue
+}
+
+// GetInfo()
+//
+// This function is based on ODBC's CLIGetInfo() function.
+// The function returns general information about the data source
+// using the same keys as ODBC.
+struct TGetInfoReq {
+ // The session to run this request against
+ 1: required TSessionHandle sessionHandle
+
+ 2: required TGetInfoType infoType
+}
+
+struct TGetInfoResp {
+ 1: required TStatus status
+
+ 2: required TGetInfoValue infoValue
+}
+
+
+// ExecuteStatement()
+//
+// Execute a statement.
+// The returned OperationHandle can be used to check on the
+// status of the statement, and to fetch results once the
+// statement has finished executing.
+struct TExecuteStatementReq {
+ // The session to execute the statement against
+ 1: required TSessionHandle sessionHandle
+
+ // The statement to be executed (DML, DDL, SET, etc)
+ 2: required string statement
+
+ // Configuration properties that are overlayed on top of the
+ // the existing session configuration before this statement
+ // is executed. These properties apply to this statement
+ // only and will not affect the subsequent state of the Session.
+ 3: optional map<string, string> confOverlay
+}
+
+struct TExecuteStatementResp {
+ 1: required TStatus status
+ 2: optional TOperationHandle operationHandle
+}
+
+
+// GetTypeInfo()
+//
+// Get information about types supported by the HiveServer instance.
+// The information is returned as a result set which can be fetched
+// using the OperationHandle provided in the response.
+//
+// Refer to the documentation for ODBC's CLIGetTypeInfo function for
+// the format of the result set.
+struct TGetTypeInfoReq {
+ // The session to run this request against.
+ 1: required TSessionHandle sessionHandle
+}
+
+struct TGetTypeInfoResp {
+ 1: required TStatus status
+ 2: optional TOperationHandle operationHandle
+}
+
+
+// GetCatalogs()
+//
+// Returns the list of catalogs (databases)
+// Results are ordered by TABLE_CATALOG
+//
+// Resultset columns :
+// col1
+// name: TABLE_CAT
+// type: STRING
+// desc: Catalog name. NULL if not applicable.
+//
+struct TGetCatalogsReq {
+ // Session to run this request against
+ 1: required TSessionHandle sessionHandle
+}
+
+struct TGetCatalogsResp {
+ 1: required TStatus status
+ 2: optional TOperationHandle operationHandle
+}
+
+
+// GetSchemas()
+//
+// Retrieves the schema names available in this database.
+// The results are ordered by TABLE_CATALOG and TABLE_SCHEM.
+// col1
+// name: TABLE_SCHEM
+// type: STRING
+// desc: schema name
+// col2
+// name: TABLE_CATALOG
+// type: STRING
+// desc: catalog name
+struct TGetSchemasReq {
+ // Session to run this request against
+ 1: required TSessionHandle sessionHandle
+
+ // Name of the catalog. Must not contain a search pattern.
+ 2: optional TIdentifier catalogName
+
+ // schema name or pattern
+ 3: optional TPatternOrIdentifier schemaName
+}
+
+struct TGetSchemasResp {
+ 1: required TStatus status
+ 2: optional TOperationHandle operationHandle
+}
+
+
+// GetTables()
+//
+// Returns a list of tables with catalog, schema, and table
+// type information. The information is returned as a result
+// set which can be fetched using the OperationHandle
+// provided in the response.
+// Results are ordered by TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, and TABLE_NAME
+//
+// Result Set Columns:
+//
+// col1
+// name: TABLE_CAT
+// type: STRING
+// desc: Catalog name. NULL if not applicable.
+//
+// col2
+// name: TABLE_SCHEM
+// type: STRING
+// desc: Schema name.
+//
+// col3
+// name: TABLE_NAME
+// type: STRING
+// desc: Table name.
+//
+// col4
+// name: TABLE_TYPE
+// type: STRING
+// desc: The table type, e.g. "TABLE", "VIEW", etc.
+//
+// col5
+// name: REMARKS
+// type: STRING
+// desc: Comments about the table
+//
+struct TGetTablesReq {
+ // Session to run this request against
+ 1: required TSessionHandle sessionHandle
+
+ // Name of the catalog or a search pattern.
+ 2: optional TPatternOrIdentifier catalogName
+
+ // Name of the schema or a search pattern.
+ 3: optional TPatternOrIdentifier schemaName
+
+ // Name of the table or a search pattern.
+ 4: optional TPatternOrIdentifier tableName
+
+ // List of table types to match
+ // e.g. "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
+ // "LOCAL TEMPORARY", "ALIAS", "SYNONYM", etc.
+ 5: optional list<string> tableTypes
+}
+
+struct TGetTablesResp {
+ 1: required TStatus status
+ 2: optional TOperationHandle operationHandle
+}
+
+
+// GetTableTypes()
+//
+// Returns the table types available in this database.
+// The results are ordered by table type.
+//
+// col1
+// name: TABLE_TYPE
+// type: STRING
+// desc: Table type name.
+struct TGetTableTypesReq {
+ // Session to run this request against
+ 1: required TSessionHandle sessionHandle
+}
+
+struct TGetTableTypesResp {
+ 1: required TStatus status
+ 2: optional TOperationHandle operationHandle
+}
+
+
+// GetColumns()
+//
+// Returns a list of columns in the specified tables.
+// The information is returned as a result set which can be fetched
+// using the OperationHandle provided in the response.
+// Results are ordered by TABLE_CAT, TABLE_SCHEM, TABLE_NAME,
+// and ORDINAL_POSITION.
+//
+// Result Set Columns are the same as those for the ODBC CLIColumns
+// function.
+//
+struct TGetColumnsReq {
+ // Session to run this request against
+ 1: required TSessionHandle sessionHandle
+
+ // Name of the catalog. Must not contain a search pattern.
+ 2: optional TIdentifier catalogName
+
+ // Schema name or search pattern
+ 3: optional TPatternOrIdentifier schemaName
+
+ // Table name or search pattern
+ 4: optional TPatternOrIdentifier tableName
+
+ // Column name or search pattern
+ 5: optional TPatternOrIdentifier columnName
+}
+
+struct TGetColumnsResp {
+ 1: required TStatus status
+ 2: optional TOperationHandle operationHandle
+}
+
+
+// GetFunctions()
+//
+// Returns a list of functions supported by the data source. The
+// behavior of this function matches
+// java.sql.DatabaseMetaData.getFunctions() both in terms of
+// inputs and outputs.
+//
+// Result Set Columns:
+//
+// col1
+// name: FUNCTION_CAT
+// type: STRING
+// desc: Function catalog (may be null)
+//
+// col2
+// name: FUNCTION_SCHEM
+// type: STRING
+// desc: Function schema (may be null)
+//
+// col3
+// name: FUNCTION_NAME
+// type: STRING
+// desc: Function name. This is the name used to invoke the function.
+//
+// col4
+// name: REMARKS
+// type: STRING
+// desc: Explanatory comment on the function.
+//
+// col5
+// name: FUNCTION_TYPE
+// type: SMALLINT
+// desc: Kind of function. One of:
+// * functionResultUnknown - Cannot determine if a return value or a table
+// will be returned.
+// * functionNoTable - Does not return a table.
+// * functionReturnsTable - Returns a table.
+//
+// col6
+// name: SPECIFIC_NAME
+// type: STRING
+// desc: The name which uniquely identifies this function within its schema.
+// In this case this is the fully qualified class name of the class
+// that implements this function.
+//
+struct TGetFunctionsReq {
+ // Session to run this request against
+ 1: required TSessionHandle sessionHandle
+
+ // A catalog name; must match the catalog name as it is stored in the
+ // database; "" retrieves those without a catalog; null means
+ // that the catalog name should not be used to narrow the search.
+ 2: optional TIdentifier catalogName
+
+ // A schema name pattern; must match the schema name as it is stored
+ // in the database; "" retrieves those without a schema; null means
+ // that the schema name should not be used to narrow the search.
+ 3: optional TPatternOrIdentifier schemaName
+
+ // A function name pattern; must match the function name as it is stored
+ // in the database.
+ 4: required TPatternOrIdentifier functionName
+}
+
+struct TGetFunctionsResp {
+ 1: required TStatus status
+ 2: optional TOperationHandle operationHandle
+}
+
+
+// GetOperationStatus()
+//
+// Get the status of an operation running on the server.
+struct TGetOperationStatusReq {
+ // The operation whose status is being polled
+ 1: required TOperationHandle operationHandle
+}
+
+struct TGetOperationStatusResp {
+ 1: required TStatus status
+ 2: optional TOperationState operationState
+}
+
+
+// CancelOperation()
+//
+// Cancels processing on the specified operation handle and
+// frees any resources which were allocated.
+struct TCancelOperationReq {
+ // Operation to cancel
+ 1: required TOperationHandle operationHandle
+}
+
+struct TCancelOperationResp {
+ 1: required TStatus status
+}
+
+
+// CloseOperation()
+//
+// Given an operation in the FINISHED, CANCELED,
+// or ERROR states, CloseOperation() will free
+// all of the resources which were allocated on
+// the server to service the operation.
+struct TCloseOperationReq {
+ 1: required TOperationHandle operationHandle
+}
+
+struct TCloseOperationResp {
+ 1: required TStatus status
+}
+
+
+// GetResultSetMetadata()
+//
+// Retrieves schema information for the specified operation
+struct TGetResultSetMetadataReq {
+ // Operation for which to fetch result set schema information
+ 1: required TOperationHandle operationHandle
+}
+
+struct TGetResultSetMetadataResp {
+ 1: required TStatus status
+ 2: optional TTableSchema schema
+}
+
+
+enum TFetchOrientation {
+ // Get the next rowset. The fetch offset is ignored.
+ FETCH_NEXT,
+
+ // Get the previous rowset. The fetch offset is ignored.
+ // NOT SUPPORTED
+ FETCH_PRIOR,
+
+ // Return the rowset at the given fetch offset relative
+ // to the current rowset.
+ // NOT SUPPORTED
+ FETCH_RELATIVE,
+
+ // Return the rowset at the specified fetch offset.
+ // NOT SUPPORTED
+ FETCH_ABSOLUTE,
+
+ // Get the first rowset in the result set.
+ FETCH_FIRST,
+
+ // Get the last rowset in the result set.
+ // NOT SUPPORTED
+ FETCH_LAST
+}
+
+// FetchResults()
+//
+// Fetch rows from the server corresponding to
+// a particular OperationHandle.
+struct TFetchResultsReq {
+ // Operation from which to fetch results.
+ 1: required TOperationHandle operationHandle
+
+ // The fetch orientation. For V1 this must be either
+ // FETCH_NEXT or FETCH_FIRST. Defaults to FETCH_NEXT.
+ 2: required TFetchOrientation orientation = TFetchOrientation.FETCH_NEXT
+
+ // Max number of rows that should be returned in
+ // the rowset.
+ 3: required i64 maxRows
+}
+
+struct TFetchResultsResp {
+ 1: required TStatus status
+
+ // TRUE if there are more rows left to fetch from the server.
+ 2: optional bool hasMoreRows
+
+ // The rowset. This is optional so that we have the
+ // option in the future of adding alternate formats for
+ // representing result set data, e.g. delimited strings,
+ // binary encoded, etc.
+ 3: optional TRowSet results
+}
+
+service TCLIService {
+
+ TOpenSessionResp OpenSession(1:TOpenSessionReq req);
+
+ TCloseSessionResp CloseSession(1:TCloseSessionReq req);
+
+ TGetInfoResp GetInfo(1:TGetInfoReq req);
+
+ TExecuteStatementResp ExecuteStatement(1:TExecuteStatementReq req);
+
+ TGetTypeInfoResp GetTypeInfo(1:TGetTypeInfoReq req);
+
+ TGetCatalogsResp GetCatalogs(1:TGetCatalogsReq req);
+
+ TGetSchemasResp GetSchemas(1:TGetSchemasReq req);
+
+ TGetTablesResp GetTables(1:TGetTablesReq req);
+
+ TGetTableTypesResp GetTableTypes(1:TGetTableTypesReq req);
+
+ TGetColumnsResp GetColumns(1:TGetColumnsReq req);
+
+ TGetFunctionsResp GetFunctions(1:TGetFunctionsReq req);
+
+ TGetOperationStatusResp GetOperationStatus(1:TGetOperationStatusReq req);
+
+ TCancelOperationResp CancelOperation(1:TCancelOperationReq req);
+
+ TCloseOperationResp CloseOperation(1:TCloseOperationReq req);
+
+ TGetResultSetMetadataResp GetResultSetMetadata(1:TGetResultSetMetadataReq req);
+
+ TFetchResultsResp FetchResults(1:TFetchResultsReq req);
+}
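Taken together, the RPCs above define the full client lifecycle. A hedged, minimal round trip in Java against the standard Thrift-generated bindings; it omits status checking, authentication, and error cleanup, and the host, port, and query are placeholders:

  import org.apache.thrift.protocol.TBinaryProtocol;
  import org.apache.thrift.transport.TSocket;
  import org.apache.thrift.transport.TTransport;

  public class TCLIServiceClientSketch {
    public static void main(String[] args) throws Exception {
      TTransport transport = new TSocket("localhost", 10000); // placeholder endpoint
      transport.open();
      TCLIService.Client client =
          new TCLIService.Client(new TBinaryProtocol(transport));

      // OpenSession() -> ExecuteStatement() -> FetchResults() -> CloseSession()
      TSessionHandle session =
          client.OpenSession(new TOpenSessionReq()).getSessionHandle();

      TOperationHandle op = client.ExecuteStatement(
          new TExecuteStatementReq(session, "SELECT 1")).getOperationHandle();

      TRowSet rows = client.FetchResults(
          new TFetchResultsReq(op, TFetchOrientation.FETCH_NEXT, 100)).getResults();
      System.out.println(rows.getRows().size() + " row(s) fetched");

      client.CloseOperation(new TCloseOperationReq(op));
      client.CloseSession(new TCloseSessionReq(session));
      transport.close();
    }
  }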
diff --git service/src/java/org/apache/hive/service/AbstractService.java service/src/java/org/apache/hive/service/AbstractService.java
new file mode 100644
index 0000000..c2a2b2d
--- /dev/null
+++ service/src/java/org/apache/hive/service/AbstractService.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * AbstractService.
+ *
+ */
+public abstract class AbstractService implements Service {
+
+ private static final Log LOG = LogFactory.getLog(AbstractService.class);
+
+ /**
+ * Service state: initially {@link STATE#NOTINITED}.
+ */
+ private STATE state = STATE.NOTINITED;
+
+ /**
+ * Service name.
+ */
+ private final String name;
+ /**
+ * Service start time. Will be zero until the service is started.
+ */
+ private long startTime;
+
+ /**
+ * The configuration. Will be null until the service is initialized.
+ */
+ private HiveConf hiveConf;
+
+ /**
+ * List of state change listeners; it is final to ensure
+ * that it will never be null.
+ */
+ private final List<ServiceStateChangeListener> listeners =
+ new ArrayList<ServiceStateChangeListener>();
+
+ /**
+ * Construct the service.
+ *
+ * @param name
+ * service name
+ */
+ public AbstractService(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public synchronized STATE getServiceState() {
+ return state;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @throws IllegalStateException
+ * if the current service state does not permit
+ * this action
+ */
+ @Override
+ public synchronized void init(HiveConf hiveConf) {
+ ensureCurrentState(STATE.NOTINITED);
+ this.hiveConf = hiveConf;
+ changeState(STATE.INITED);
+ LOG.info("Service:" + getName() + " is inited.");
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @throws IllegalStateException
+ * if the current service state does not permit
+ * this action
+ */
+ @Override
+ public synchronized void start() {
+ startTime = System.currentTimeMillis();
+ ensureCurrentState(STATE.INITED);
+ changeState(STATE.STARTED);
+ LOG.info("Service:" + getName() + " is started.");
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @throws IllegalStateException
+ * if the current service state does not permit
+ * this action
+ */
+ @Override
+ public synchronized void stop() {
+ if (state == STATE.STOPPED ||
+ state == STATE.INITED ||
+ state == STATE.NOTINITED) {
+ // already stopped, or else it was never
+ // started (e.g. another service failing canceled startup)
+ return;
+ }
+ ensureCurrentState(STATE.STARTED);
+ changeState(STATE.STOPPED);
+ LOG.info("Service:" + getName() + " is stopped.");
+ }
+
+ @Override
+ public synchronized void register(ServiceStateChangeListener l) {
+ listeners.add(l);
+ }
+
+ @Override
+ public synchronized void unregister(ServiceStateChangeListener l) {
+ listeners.remove(l);
+ }
+
+ @Override
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public synchronized HiveConf getHiveConf() {
+ return hiveConf;
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+
+ /**
+ * Verify that a service is in a given state.
+ *
+ * @param currentState
+ * the desired state
+ * @throws IllegalStateException
+ * if the service state is different from
+ * the desired state
+ */
+ private void ensureCurrentState(STATE currentState) {
+ ServiceOperations.ensureCurrentState(state, currentState);
+ }
+
+ /**
+ * Change to a new state and notify all listeners.
+ * This is a private method that is only invoked from synchronized methods,
+ * which avoids having to clone the listener list. It does imply that
+ * the state change listener methods should be short lived, as they
+ * will delay the state transition.
+ *
+ * @param newState
+ * new service state
+ */
+ private void changeState(STATE newState) {
+ state = newState;
+ // notify listeners
+ for (ServiceStateChangeListener l : listeners) {
+ l.stateChanged(this);
+ }
+ }
+
+}
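A short usage sketch (mine, not from the patch) of the state machine this class enforces; since AbstractService declares no abstract methods, an anonymous subclass is enough:

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hive.service.AbstractService;
  import org.apache.hive.service.Service;

  public class LifecycleSketch {
    public static void main(String[] args) {
      // AbstractService has no abstract methods, so an anonymous subclass suffices.
      Service svc = new AbstractService("DemoService") { };
      svc.init(new HiveConf()); // NOTINITED -> INITED; other starting states throw IllegalStateException
      svc.start();              // INITED -> STARTED; the start time is recorded
      svc.stop();               // STARTED -> STOPPED; a repeated stop() is a no-op
    }
  }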
diff --git service/src/java/org/apache/hive/service/BreakableService.java service/src/java/org/apache/hive/service/BreakableService.java
new file mode 100644
index 0000000..9c44beb
--- /dev/null
+++ service/src/java/org/apache/hive/service/BreakableService.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.Service.STATE;
+
+/**
+ * This is a service that can be configured to break on any of the lifecycle
+ * events, to test the failure handling of other parts of the service
+ * infrastructure.
+ *
+ * It retains a counter of the number of times each entry point is called -
+ * these counters are incremented before the exceptions are raised and
+ * before the superclass state methods are invoked.
+ *
+ */
+public class BreakableService extends AbstractService {
+ private boolean failOnInit;
+ private boolean failOnStart;
+ private boolean failOnStop;
+ private final int[] counts = new int[4];
+
+ public BreakableService() {
+ this(false, false, false);
+ }
+
+ public BreakableService(boolean failOnInit,
+ boolean failOnStart,
+ boolean failOnStop) {
+ super("BreakableService");
+ this.failOnInit = failOnInit;
+ this.failOnStart = failOnStart;
+ this.failOnStop = failOnStop;
+ inc(STATE.NOTINITED);
+ }
+
+ private int convert(STATE state) {
+ switch (state) {
+ case NOTINITED: return 0;
+ case INITED: return 1;
+ case STARTED: return 2;
+ case STOPPED: return 3;
+ default: return 0;
+ }
+ }
+
+ private void inc(STATE state) {
+ int index = convert(state);
+ counts[index] ++;
+ }
+
+ public int getCount(STATE state) {
+ return counts[convert(state)];
+ }
+
+ private void maybeFail(boolean fail, String action) {
+ if (fail) {
+ throw new BrokenLifecycleEvent(action);
+ }
+ }
+
+ @Override
+ public void init(HiveConf conf) {
+ inc(STATE.INITED);
+ maybeFail(failOnInit, "init");
+ super.init(conf);
+ }
+
+ @Override
+ public void start() {
+ inc(STATE.STARTED);
+ maybeFail(failOnStart, "start");
+ super.start();
+ }
+
+ @Override
+ public void stop() {
+ inc(STATE.STOPPED);
+ maybeFail(failOnStop, "stop");
+ super.stop();
+ }
+
+ public void setFailOnInit(boolean failOnInit) {
+ this.failOnInit = failOnInit;
+ }
+
+ public void setFailOnStart(boolean failOnStart) {
+ this.failOnStart = failOnStart;
+ }
+
+ public void setFailOnStop(boolean failOnStop) {
+ this.failOnStop = failOnStop;
+ }
+
+ /**
+ * The exception explicitly raised on a failure
+ */
+ public static class BrokenLifecycleEvent extends RuntimeException {
+ BrokenLifecycleEvent(String action) {
+ super("Lifecycle Failure during " + action);
+ }
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/CompositeService.java service/src/java/org/apache/hive/service/CompositeService.java
new file mode 100644
index 0000000..8979118
--- /dev/null
+++ service/src/java/org/apache/hive/service/CompositeService.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * CompositeService.
+ *
+ */
+public class CompositeService extends AbstractService {
+
+ private static final Log LOG = LogFactory.getLog(CompositeService.class);
+
+ private final List<Service> serviceList = new ArrayList<Service>();
+
+ public CompositeService(String name) {
+ super(name);
+ }
+
+ public Collection<Service> getServices() {
+ return Collections.unmodifiableList(serviceList);
+ }
+
+ protected synchronized void addService(Service service) {
+ serviceList.add(service);
+ }
+
+ protected synchronized boolean removeService(Service service) {
+ return serviceList.remove(service);
+ }
+
+ @Override
+ public synchronized void init(HiveConf hiveConf) {
+ for (Service service : serviceList) {
+ service.init(hiveConf);
+ }
+ super.init(hiveConf);
+ }
+
+ @Override
+ public synchronized void start() {
+ int i = 0;
+ try {
+ for (int n = serviceList.size(); i < n; i++) {
+ Service service = serviceList.get(i);
+ service.start();
+ }
+ super.start();
+ } catch (Throwable e) {
+ LOG.error("Error starting services " + getName(), e);
+ // Note that the state of the failed service is still INITED and not
+ // STARTED. Even though the last service is not started completely, still
+ // call stop() on all services including failed service to make sure cleanup
+ // happens.
+ stop(i);
+ throw new ServiceException("Failed to Start " + getName(), e);
+ }
+
+ }
+
+ @Override
+ public synchronized void stop() {
+ if (this.getServiceState() == STATE.STOPPED) {
+ // The base composite-service is already stopped, don't do anything again.
+ return;
+ }
+ if (serviceList.size() > 0) {
+ stop(serviceList.size() - 1);
+ }
+ super.stop();
+ }
+
+ private synchronized void stop(int numOfServicesStarted) {
+ // stop in reverse order of start
+ for (int i = numOfServicesStarted; i >= 0; i--) {
+ Service service = serviceList.get(i);
+ try {
+ service.stop();
+ } catch (Throwable t) {
+ LOG.info("Error stopping " + service.getName(), t);
+ }
+ }
+ }
+
+ /**
+ * JVM Shutdown hook for CompositeService which will stop the given
+ * CompositeService gracefully in case of JVM shutdown.
+ */
+ public static class CompositeServiceShutdownHook implements Runnable {
+
+ private final CompositeService compositeService;
+
+ public CompositeServiceShutdownHook(CompositeService compositeService) {
+ this.compositeService = compositeService;
+ }
+
+ @Override
+ public void run() {
+ try {
+ // Stop the Composite Service
+ compositeService.stop();
+ } catch (Throwable t) {
+ LOG.info("Error stopping " + compositeService.getName(), t);
+ }
+ }
+ }
+
+
+}
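A hedged sketch of the rollback path in start(), using the BreakableService helper introduced by this same patch; the second child fails in start(), so stop(i) unwinds every started child before the ServiceException propagates:

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hive.service.BreakableService;
  import org.apache.hive.service.CompositeService;
  import org.apache.hive.service.ServiceException;

  public class RollbackSketch {
    public static void main(String[] args) {
      CompositeService composite = new CompositeService("demo") {
        {
          addService(new BreakableService());                   // healthy child
          addService(new BreakableService(false, true, false)); // throws in start()
        }
      };
      composite.init(new HiveConf()); // inits all children in order, then itself
      try {
        composite.start(); // second child fails; stop(i) unwinds every started child
      } catch (ServiceException expected) {
        // both children have been stopped; the composite never reached STARTED
      }
    }
  }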
diff --git service/src/java/org/apache/hive/service/FilterService.java service/src/java/org/apache/hive/service/FilterService.java
new file mode 100644
index 0000000..5a50874
--- /dev/null
+++ service/src/java/org/apache/hive/service/FilterService.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * FilterService.
+ *
+ */
+public class FilterService implements Service {
+
+
+ private final Service service;
+ private final long startTime = System.currentTimeMillis();
+
+ public FilterService(Service service) {
+ this.service = service;
+ }
+
+ @Override
+ public void init(HiveConf config) {
+ service.init(config);
+ }
+
+ @Override
+ public void start() {
+ service.start();
+ }
+
+ @Override
+ public void stop() {
+ service.stop();
+ }
+
+
+ @Override
+ public void register(ServiceStateChangeListener listener) {
+ service.register(listener);
+ }
+
+ @Override
+ public void unregister(ServiceStateChangeListener listener) {
+ service.unregister(listener);
+ }
+
+ @Override
+ public String getName() {
+ return service.getName();
+ }
+
+ @Override
+ public HiveConf getHiveConf() {
+ return service.getHiveConf();
+ }
+
+ @Override
+ public STATE getServiceState() {
+ return service.getServiceState();
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/Service.java service/src/java/org/apache/hive/service/Service.java
new file mode 100644
index 0000000..2111837
--- /dev/null
+++ service/src/java/org/apache/hive/service/Service.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * Service.
+ *
+ */
+public interface Service {
+
+ /**
+ * Service states
+ */
+ public enum STATE {
+ /** Constructed but not initialized */
+ NOTINITED,
+
+ /** Initialized but not started or stopped */
+ INITED,
+
+ /** started and not stopped */
+ STARTED,
+
+ /** stopped. No further state transitions are permitted */
+ STOPPED
+ }
+
+ /**
+ * Initialize the service.
+ *
+ * The transition must be from {@link STATE#NOTINITED} to {@link STATE#INITED} unless the
+ * operation failed and an exception was raised.
+ *
+ * @param conf
+ * the configuration of the service
+ */
+ void init(HiveConf conf);
+
+
+ /**
+ * Start the service.
+ *
+ * The transition should be from {@link STATE#INITED} to {@link STATE#STARTED} unless the
+ * operation failed and an exception was raised.
+ */
+ void start();
+
+ /**
+ * Stop the service.
+ *
+ * This operation must be designed to complete regardless of the initial state
+ * of the service, including the state of all its internal fields.
+ */
+ void stop();
+
+ /**
+ * Register a listener for service state change events.
+ *
+ * @param listener
+ * a new listener
+ */
+ void register(ServiceStateChangeListener listener);
+
+ /**
+ * Unregister a previously registered listener for service state change events.
+ *
+ * @param listener
+ * the listener to unregister.
+ */
+ void unregister(ServiceStateChangeListener listener);
+
+ /**
+ * Get the name of this service.
+ *
+ * @return the service name
+ */
+ String getName();
+
+ /**
+ * Get the configuration of this service.
+ * This is normally not a clone and may be manipulated, though there are no
+ * guarantees as to what the consequences of such actions may be
+ *
+ * @return the current configuration, unless a specific implementation chooses
+ * otherwise.
+ */
+ HiveConf getHiveConf();
+
+ /**
+ * Get the current service state
+ *
+ * @return the state of the service
+ */
+ STATE getServiceState();
+
+ /**
+ * Get the service start time
+ *
+ * @return the start time of the service. This will be zero if the service
+ * has not yet been started.
+ */
+ long getStartTime();
+
+}
diff --git service/src/java/org/apache/hive/service/ServiceException.java service/src/java/org/apache/hive/service/ServiceException.java
new file mode 100644
index 0000000..3622cf8
--- /dev/null
+++ service/src/java/org/apache/hive/service/ServiceException.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+/**
+ * ServiceException.
+ *
+ */
+public class ServiceException extends RuntimeException {
+
+ public ServiceException(Throwable cause) {
+ super(cause);
+ }
+
+ public ServiceException(String message) {
+ super(message);
+ }
+
+ public ServiceException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
diff --git service/src/java/org/apache/hive/service/ServiceOperations.java service/src/java/org/apache/hive/service/ServiceOperations.java
new file mode 100644
index 0000000..8946219
--- /dev/null
+++ service/src/java/org/apache/hive/service/ServiceOperations.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * ServiceOperations.
+ *
+ */
+public final class ServiceOperations {
+ private static final Log LOG = LogFactory.getLog(ServiceOperations.class);
+
+ private ServiceOperations() {
+ }
+
+ /**
+ * Verify that a service is in a given state.
+ * @param state the actual state a service is in
+ * @param expectedState the desired state
+ * @throws IllegalStateException if the service state is different from
+ * the desired state
+ */
+ public static void ensureCurrentState(Service.STATE state,
+ Service.STATE expectedState) {
+ if (state != expectedState) {
+ throw new IllegalStateException("For this operation, the " +
+ "current service state must be "
+ + expectedState
+ + " instead of " + state);
+ }
+ }
+
+ /**
+ * Initialize a service.
+ *
+ * The service state is checked before the operation begins.
+ * This process is not thread safe.
+ * @param service a service that must be in the state
+ * {@link Service.STATE#NOTINITED}
+ * @param configuration the configuration to initialize the service with
+ * @throws RuntimeException on a state change failure
+ * @throws IllegalStateException if the service is in the wrong state
+ */
+
+ public static void init(Service service, HiveConf configuration) {
+ Service.STATE state = service.getServiceState();
+ ensureCurrentState(state, Service.STATE.NOTINITED);
+ service.init(configuration);
+ }
+
+ /**
+ * Start a service.
+ *
+ * The service state is checked before the operation begins.
+ * This process is not thread safe.
+ * @param service a service that must be in the state
+ * {@link Service.STATE#INITED}
+ * @throws RuntimeException on a state change failure
+ * @throws IllegalStateException if the service is in the wrong state
+ */
+
+ public static void start(Service service) {
+ Service.STATE state = service.getServiceState();
+ ensureCurrentState(state, Service.STATE.INITED);
+ service.start();
+ }
+
+ /**
+ * Initialize then start a service.
+ *
+ * The service state is checked before the operation begins.
+ * This process is not thread safe.
+ * @param service a service that must be in the state
+ * {@link Service.STATE#NOTINITED}
+ * @param configuration the configuration to initialize the service with
+ * @throws RuntimeException on a state change failure
+ * @throws IllegalStateException if the service is in the wrong state
+ */
+ public static void deploy(Service service, HiveConf configuration) {
+ init(service, configuration);
+ start(service);
+ }
+
+ /**
+ * Stop a service.
+ * Do nothing if the service is null or not
+ * in a state in which it can be/needs to be stopped.
+ *
+ * The service state is checked before the operation begins.
+ * This process is not thread safe.
+ * @param service a service or null
+ */
+ public static void stop(Service service) {
+ if (service != null) {
+ Service.STATE state = service.getServiceState();
+ if (state == Service.STATE.STARTED) {
+ service.stop();
+ }
+ }
+ }
+
+ /**
+ * Stop a service; if it is null, do nothing. Exceptions are caught and
+ * logged at warn level (Throwables are not caught). This operation is
+ * intended for use in cleanup operations.
+ *
+ * @param service a service; may be null
+ * @return any exception that was caught; null if none was.
+ */
+ public static Exception stopQuietly(Service service) {
+ try {
+ stop(service);
+ } catch (Exception e) {
+ LOG.warn("When stopping the service " + service.getName()
+ + " : " + e,
+ e);
+ return e;
+ }
+ return null;
+ }
+
+}
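A short usage sketch (the driver class is illustrative): deploy() composes the init/start transitions, and stopQuietly() is safe to call from cleanup paths even when startup failed part-way.

package org.apache.hive.service;

import org.apache.hadoop.hive.conf.HiveConf;

// Illustrative driver for a service lifecycle using ServiceOperations.
public class ServiceLifecycleExample {
  public static void run(Service service) {
    try {
      ServiceOperations.deploy(service, new HiveConf()); // NOTINITED -> INITED -> STARTED
      // ... use the running service ...
    } finally {
      ServiceOperations.stopQuietly(service); // logs and returns any exception
    }
  }
}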
diff --git service/src/java/org/apache/hive/service/ServiceStateChangeListener.java service/src/java/org/apache/hive/service/ServiceStateChangeListener.java
new file mode 100644
index 0000000..16ad9a9
--- /dev/null
+++ service/src/java/org/apache/hive/service/ServiceStateChangeListener.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+/**
+ * ServiceStateChangeListener.
+ *
+ */
+public interface ServiceStateChangeListener {
+
+ /**
+ * Callback to notify of a state change. The service will already
+ * have changed state before this callback is invoked.
+ *
+ * This operation is invoked on the thread that initiated the state change,
+ * while the service itself is in a synchronized section.
+ * <ol>
+ * <li>Any long-lived operation here will prevent the service state
+ * change from completing in a timely manner.</li>
+ * <li>If another thread is somehow invoked from the listener, and
+ * that thread invokes the methods of the service (including
+ * subclass-specific methods), there is a risk of a deadlock.</li>
+ * </ol>
+ *
+ * @param service the service that has changed.
+ */
+ void stateChanged(Service service);
+
+}
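A compliant listener therefore does something cheap and returns immediately, as in this sketch (the class is illustrative):

package org.apache.hive.service;

// Sketch of a listener that honors the contract above: it returns quickly
// and never calls back into the service, since it runs inside the
// service's synchronized state-change section.
public class LoggingStateChangeListener implements ServiceStateChangeListener {
  @Override
  public void stateChanged(Service service) {
    System.out.println(service.getName() + " -> " + service.getServiceState());
  }
}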
diff --git service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
new file mode 100644
index 0000000..4db0022
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+public class AnonymousAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+
+ @Override
+ public void Authenticate(String user, String password) throws AuthenticationException {
+ // no-op authentication
+ return;
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
new file mode 100644
index 0000000..b92fd83
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+public class AuthenticationProviderFactory {
+
+ public static enum AuthMethods {
+ LDAP("LDAP"),
+ CUSTOM("CUSTOM"),
+ NONE("NONE");
+
+ String authMethod;
+
+ AuthMethods(String authMethod) {
+ this.authMethod = authMethod;
+ }
+
+ public String getAuthMethod() {
+ return authMethod;
+ }
+
+ public static AuthMethods getValidAuthMethod(String authMethodStr) throws AuthenticationException {
+ for (AuthMethods auth : AuthMethods.values()) {
+ if (authMethodStr.equals(auth.getAuthMethod())) {
+ return auth;
+ }
+ }
+ throw new AuthenticationException("Not a valid authentication method");
+ }
+ }
+
+ private AuthenticationProviderFactory () {
+ }
+
+ public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
+ throws AuthenticationException {
+ if (authMethod.equals(AuthMethods.LDAP)) {
+ return new LdapAuthenticationProviderImpl();
+ } else if (authMethod.equals(AuthMethods.CUSTOM)) {
+ return new CustomAuthenticationProviderImpl();
+ } else if (authMethod.equals(AuthMethods.NONE)) {
+ return new AnonymousAuthenticationProviderImpl();
+ } else {
+ throw new AuthenticationException("Unsupported authentication method");
+ }
+ }
+}
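Resolving and applying a provider looks roughly like the sketch below; the method string and credentials are placeholders. Authenticate() returns normally on success and throws AuthenticationException on failure.

package org.apache.hive.service.auth;

import javax.security.sasl.AuthenticationException;

// Sketch: map a configured string to an AuthMethods value, obtain the
// matching provider, and authenticate.
public class AuthFactoryExample {
  public static void check(String user, String password) throws AuthenticationException {
    AuthenticationProviderFactory.AuthMethods method =
        AuthenticationProviderFactory.AuthMethods.getValidAuthMethod("LDAP");
    PasswdAuthenticationProvider provider =
        AuthenticationProviderFactory.getAuthenticationProvider(method);
    provider.Authenticate(user, password);
  }
}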
diff --git service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
new file mode 100644
index 0000000..d0cbcbb
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.util.ReflectionUtils;
+
+public class CustomAuthenticationProviderImpl
+ implements PasswdAuthenticationProvider {
+
+ Class<? extends PasswdAuthenticationProvider> customHandlerClass;
+ PasswdAuthenticationProvider customProvider;
+
+ @SuppressWarnings("unchecked")
+ CustomAuthenticationProviderImpl () {
+ HiveConf conf = new HiveConf();
+ this.customHandlerClass = (Class<? extends PasswdAuthenticationProvider>)
+ conf.getClass(
+ HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
+ PasswdAuthenticationProvider.class);
+ this.customProvider =
+ ReflectionUtils.newInstance(this.customHandlerClass, conf);
+ }
+
+ @Override
+ public void Authenticate(String user, String password)
+ throws AuthenticationException {
+ this.customProvider.Authenticate(user, password);
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
new file mode 100644
index 0000000..1809e1b
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TTransportException;
+import org.apache.thrift.transport.TTransportFactory;
+
+public class HiveAuthFactory {
+
+ public static enum AuthTypes {
+ NOSASL("NOSASL"),
+ NONE("NONE"),
+ LDAP("LDAP"),
+ KERBEROS("KERBEROS"),
+ CUSTOM("CUSTOM");
+
+ private String authType; // Auth type for SASL
+
+ AuthTypes(String authType) {
+ this.authType = authType;
+ }
+
+ public String getAuthName() {
+ return authType;
+ }
+
+ };
+
+ private HadoopThriftAuthBridge.Server saslServer = null;
+ private String authTypeStr;
+ HiveConf conf;
+
+ public HiveAuthFactory() throws TTransportException {
+ conf = new HiveConf();
+
+ authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
+ if (authTypeStr == null) {
+ authTypeStr = AuthTypes.NONE.getAuthName();
+ }
+ if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())
+ && ShimLoader.getHadoopShims().isSecureShimImpl()) {
+ saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer(
+ conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
+ conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL)
+ );
+ }
+ }
+
+ public TTransportFactory getAuthTransFactory() throws LoginException {
+
+ TTransportFactory transportFactory;
+
+ if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+ try {
+ transportFactory = saslServer.createTransportFactory();
+ } catch (TTransportException e) {
+ throw new LoginException(e.getMessage());
+ }
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())) {
+ transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())) {
+ transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) {
+ transportFactory = new TTransportFactory();
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
+ transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+ } else {
+ throw new LoginException("Unsupported authentication type " + authTypeStr);
+ }
+ return transportFactory;
+ }
+
+ public TProcessorFactory getAuthProcFactory(ThriftCLIService service)
+ throws LoginException {
+ if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+ return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
+ } else {
+ return PlainSaslHelper.getPlainProcessorFactory(service);
+ }
+ }
+
+ public String getRemoteUser() {
+ if (saslServer != null) {
+ return saslServer.getRemoteUser();
+ } else {
+ return null;
+ }
+ }
+
+ /* Perform Kerberos login using the Hadoop shim API if the configuration is available */
+ public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
+ String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
+ String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
+ if (principal.isEmpty() && keyTabFile.isEmpty()) {
+ // no security configuration available
+ return;
+ } else if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
+ ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
+ } else {
+ throw new IOException ("HiveServer2 kerberos principal or keytab is not correctly configured");
+ }
+ }
+
+}
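A server embedding this factory would wire it up roughly as in the sketch below; the factory reads the HIVE_SERVER2_AUTHENTICATION setting from HiveConf on construction, so no arguments are needed. The wrapper class is illustrative.

package org.apache.hive.service.auth;

import javax.security.auth.login.LoginException;

import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;

// Sketch: obtaining the transport factory a Thrift server binds with.
// Depending on the configured auth type this is a Kerberos SASL factory,
// a PLAIN SASL factory, or a raw TTransportFactory (NOSASL).
public class AuthWiringExample {
  public static TTransportFactory serverTransportFactory()
      throws TTransportException, LoginException {
    HiveAuthFactory authFactory = new HiveAuthFactory();
    return authFactory.getAuthTransFactory();
  }
}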
diff --git service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
new file mode 100644
index 0000000..379dafb
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+
+import javax.security.sasl.SaslException;
+
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server;
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCLIService.Iface;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.thrift.TProcessor;
+import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TTransport;
+
+public class KerberosSaslHelper {
+
+ private static class CLIServiceProcessorFactory extends TProcessorFactory {
+ private final ThriftCLIService service;
+ private final Server saslServer;
+
+ public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
+ super(null);
+ this.service = service;
+ this.saslServer = saslServer;
+ }
+
+ @Override
+ public TProcessor getProcessor(TTransport trans) {
+ TProcessor sqlProcessor = new TCLIService.Processor<Iface>(service);
+ return saslServer.wrapNonAssumingProcessor(sqlProcessor);
+ }
+ }
+
+ public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
+ ThriftCLIService service) {
+ return new CLIServiceProcessorFactory (saslServer, service);
+ }
+
+ public static TTransport getKerberosTransport(String principal, String host,
+ final TTransport underlyingTransport) throws SaslException {
+ try {
+ final String names[] = principal.split("[/@]");
+ if (names.length != 3) {
+ throw new IllegalArgumentException("Kerberos principal should have 3 parts: "
+ + principal);
+ }
+
+ HadoopThriftAuthBridge.Client authBridge =
+ ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
+ return authBridge.createClientTransport(principal, host,
+ "KERBEROS", null, underlyingTransport);
+ } catch (IOException e) {
+ throw new SaslException("Failed to open client transport", e);
+ }
+ }
+
+
+}
diff --git service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
new file mode 100644
index 0000000..3919827
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.util.Hashtable;
+
+import javax.naming.Context;
+import javax.naming.NamingException;
+import javax.naming.directory.DirContext;
+import javax.naming.directory.InitialDirContext;
+import javax.security.sasl.AuthenticationException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class LdapAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+
+ String ldapURL;
+ String baseDN;
+
+ LdapAuthenticationProviderImpl () {
+ HiveConf conf = new HiveConf();
+ this.ldapURL = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
+ this.baseDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
+ }
+
+ @Override
+ public void Authenticate(String user, String password)
+ throws AuthenticationException {
+
+ Hashtable<String, Object> env = new Hashtable<String, Object>();
+ env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
+ env.put(Context.PROVIDER_URL, ldapURL);
+
+ // setup the security principal
+ String bindDN;
+ if (baseDN != null) {
+ bindDN = "uid=" + user + "," + baseDN;
+ } else {
+ bindDN = user;
+ }
+ env.put(Context.SECURITY_AUTHENTICATION, "simple");
+ env.put(Context.SECURITY_PRINCIPAL, bindDN);
+ env.put(Context.SECURITY_CREDENTIALS, password);
+
+ try {
+ // Create initial context
+ DirContext ctx = new InitialDirContext(env);
+ ctx.close();
+ } catch (NamingException e) {
+ throw new AuthenticationException("Error validating LDAP user");
+ }
+ return;
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
new file mode 100644
index 0000000..2d0da3a
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+public interface PasswdAuthenticationProvider {
+ /**
+ * The Authenticate method is called by the HiveServer2 authentication layer
+ * to authenticate users for their requests.
+ * If a user is to be granted access, return normally without throwing.
+ * When a user is to be disallowed, throw an appropriate {@link AuthenticationException}.
+ *
+ * For an example implementation, see {@link LdapAuthenticationProviderImpl}.
+ *
+ * @param user - The username received over the connection request
+ * @param password - The password received over the connection request
+ * @throws AuthenticationException - When a user is found to be
+ * invalid by the implementation
+ */
+ void Authenticate(String user, String password) throws AuthenticationException;
+}
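A custom implementation of this contract might look like the sketch below. Registering such a class through HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS (with CUSTOM as the authentication type) lets CustomAuthenticationProviderImpl instantiate it reflectively. The hard-coded credential check is purely illustrative.

package org.apache.hive.service.auth;

import javax.security.sasl.AuthenticationException;

// Illustrative provider: grants access only to one static credential.
public class StaticPasswdAuthenticationProvider implements PasswdAuthenticationProvider {
  @Override
  public void Authenticate(String user, String password) throws AuthenticationException {
    if (!"hive".equals(user) || !"secret".equals(password)) {
      throw new AuthenticationException("Invalid credentials for user " + user);
    }
    // Returning without an exception means the user is authenticated.
  }
}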
diff --git service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
new file mode 100644
index 0000000..18d4aae
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.NameCallback;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.AuthorizeCallback;
+import javax.security.sasl.SaslException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.auth.PlainSaslServer.ExternalAuthenticationCallback;
+import org.apache.hive.service.auth.PlainSaslServer.SaslPlainProvider;
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCLIService.Iface;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.thrift.TProcessor;
+import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TSaslClientTransport;
+import org.apache.thrift.transport.TSaslServerTransport;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportFactory;
+
+public class PlainSaslHelper {
+
+ private static class PlainServerCallbackHandler implements CallbackHandler {
+
+ @Override
+ public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+ ExternalAuthenticationCallback ac = null;
+ for (int i = 0; i < callbacks.length; i++) {
+ if (callbacks[i] instanceof ExternalAuthenticationCallback) {
+ ac = (ExternalAuthenticationCallback) callbacks[i];
+ break;
+ } else {
+ throw new UnsupportedCallbackException(callbacks[i]);
+ }
+ }
+
+ if (ac != null) {
+ PasswdAuthenticationProvider provider =
+ AuthenticationProviderFactory.getAuthenticationProvider(ac.getAuthMethod());
+ provider.Authenticate(ac.getUserName(), ac.getPasswd());
+ ac.setAuthenticated(true);
+ }
+ }
+ }
+
+ public static class PlainClientbackHandler implements CallbackHandler {
+
+ private final String userName;
+ private final String passWord;
+
+ public PlainClientbackHandler (String userName, String passWord) {
+ this.userName = userName;
+ this.passWord = passWord;
+ }
+
+ @Override
+ public void handle(Callback[] callbacks)
+ throws IOException, UnsupportedCallbackException {
+ AuthorizeCallback ac = null;
+ for (int i = 0; i < callbacks.length; i++) {
+ if (callbacks[i] instanceof NameCallback) {
+ NameCallback nameCallback = (NameCallback)callbacks[i];
+ nameCallback.setName(userName);
+ } else if (callbacks[i] instanceof PasswordCallback) {
+ PasswordCallback passCallback = (PasswordCallback) callbacks[i];
+ passCallback.setPassword(passWord.toCharArray());
+ } else {
+ throw new UnsupportedCallbackException(callbacks[i]);
+ }
+ }
+ }
+ }
+
+ private static class SQLPlainProcessorFactory extends TProcessorFactory {
+ private final ThriftCLIService service;
+ private final HiveConf conf;
+ private final boolean doAsEnabled;
+
+ public SQLPlainProcessorFactory(ThriftCLIService service) {
+ super(null);
+ this.service = service;
+ this.conf = service.getHiveConf();
+ this.doAsEnabled = conf.getBoolean("hive.server2.enable.doAs", false);
+ }
+
+ @Override
+ public TProcessor getProcessor(TTransport trans) {
+ TProcessor baseProcessor = new TCLIService.Processor<Iface>(service);
+ return doAsEnabled ? new TUGIContainingProcessor(baseProcessor, conf) :
+ new TSetIpAddressProcessor(service);
+ }
+ }
+
+ public static TProcessorFactory getPlainProcessorFactory(ThriftCLIService service) {
+ return new SQLPlainProcessorFactory(service);
+ }
+
+ // Register Plain SASL server provider
+ static {
+ java.security.Security.addProvider(new SaslPlainProvider());
+ }
+
+ public static TTransportFactory getPlainTransportFactory(String authTypeStr) {
+ TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
+ saslFactory.addServerDefinition("PLAIN",
+ authTypeStr, null, new HashMap<String, String>(),
+ new PlainServerCallbackHandler());
+ return saslFactory;
+ }
+
+ public static TTransport getPlainTransport(String userName, String passwd,
+ final TTransport underlyingTransport) throws SaslException {
+ return new TSaslClientTransport("PLAIN", null,
+ null, null, new HashMap<String, String>(),
+ new PlainClientbackHandler(userName, passwd), underlyingTransport);
+ }
+
+}
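On the client side, a connection to a PLAIN-authenticated server can be opened with getPlainTransport(); in the sketch below the host, port, and credentials are placeholders.

package org.apache.hive.service.auth;

import javax.security.sasl.SaslException;

import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

// Sketch: wrap a plain socket in a PLAIN SASL client transport.
// The caller must still call open() on the returned transport.
public class PlainClientExample {
  public static TTransport connect() throws SaslException {
    TTransport socket = new TSocket("localhost", 10000);
    return PlainSaslHelper.getPlainTransport("hive", "secret", socket);
  }
}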
diff --git service/src/java/org/apache/hive/service/auth/PlainSaslServer.java service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
new file mode 100644
index 0000000..d7f1e31
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+import java.util.ArrayDeque;
+import java.util.Deque;
+import java.util.Map;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.SaslException;
+import javax.security.sasl.SaslServer;
+import javax.security.sasl.SaslServerFactory;
+
+import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
+
+/**
+ *
+ * PlainSaslServer.
+ * Sun's JDK only provides a PLAIN SASL client, not a server. This class implements the PLAIN
+ * SASL server, conforming to RFC 4616 (http://www.ietf.org/rfc/rfc4616.txt).
+ */
+public class PlainSaslServer implements SaslServer {
+ private final AuthMethods authMethod;
+ private String user;
+ private String passwd;
+ private final CallbackHandler handler;
+
+ // Callback for external authentication
+ // The authMethod indicates the type of authentication (LDAP, Unix, Windows)
+ public static class ExternalAuthenticationCallback implements Callback {
+ private final AuthMethods authMethod;
+ private final String userName;
+ private final String passwd;
+ private boolean authenticated;
+
+ public ExternalAuthenticationCallback(AuthMethods authMethod, String userName, String passwd) {
+ this.authMethod = authMethod;
+ this.userName = userName;
+ this.passwd = passwd;
+ authenticated = false;
+ }
+
+ public AuthMethods getAuthMethod() {
+ return authMethod;
+ }
+
+ public String getUserName() {
+ return userName;
+ }
+
+ public String getPasswd() {
+ return passwd;
+ }
+
+ public void setAuthenticated (boolean authenticated) {
+ this.authenticated = authenticated;
+ }
+
+ public boolean isAuthenticated () {
+ return authenticated;
+ }
+ }
+
+
+ PlainSaslServer(CallbackHandler handler, String authMethodStr) throws SaslException {
+ this.handler = handler;
+ this.authMethod = AuthMethods.getValidAuthMethod(authMethodStr);
+ }
+
+ public String getMechanismName() {
+ return "PLAIN";
+ }
+
+ public byte[] evaluateResponse(byte[] response) throws SaslException {
+ try {
+ // parse the response
+ // message = [authzid] UTF8NUL authcid UTF8NUL passwd
+
+ Deque<String> tokenList = new ArrayDeque<String>();
+ StringBuilder messageToken = new StringBuilder();
+ for (byte b : response) {
+ if (b == 0) {
+ tokenList.addLast(messageToken.toString());
+ messageToken = new StringBuilder();
+ } else {
+ messageToken.append((char)b);
+ }
+ }
+ tokenList.addLast(messageToken.toString());
+
+ // validate response
+ if ((tokenList.size() < 2) || (tokenList.size() > 3)) {
+ throw new SaslException("Invalid message format");
+ }
+ passwd = tokenList.removeLast();
+ user = tokenList.removeLast();
+ if (user == null || user.isEmpty()) {
+ throw new SaslException("No user name provide");
+ }
+ if (passwd == null || passwd.isEmpty()) {
+ throw new SaslException("No password name provide");
+ }
+
+ // pass the user and passwd via AuthorizeCallback
+ // the caller needs to authenticate
+ ExternalAuthenticationCallback exAuth = new
+ ExternalAuthenticationCallback(authMethod, user, passwd);
+ Callback[] cbList = new Callback[] {exAuth};
+ handler.handle(cbList);
+ if (!exAuth.isAuthenticated()) {
+ throw new SaslException("Authentication failed");
+ }
+ } catch (IllegalStateException eL) {
+ throw new SaslException("Invalid message format", eL);
+ } catch (IOException eI) {
+ throw new SaslException("Error validating the login", eI);
+ } catch (UnsupportedCallbackException eU) {
+ throw new SaslException("Error validating the login", eU);
+ }
+ return null;
+ }
+
+ public boolean isComplete() {
+ return user != null;
+ }
+
+ public String getAuthorizationID() {
+ return user;
+ }
+
+ public byte[] unwrap(byte[] incoming, int offset, int len) {
+ throw new UnsupportedOperationException();
+ }
+
+ public byte[] wrap(byte[] outgoing, int offset, int len) {
+ throw new UnsupportedOperationException();
+ }
+
+ public Object getNegotiatedProperty(String propName) {
+ return null;
+ }
+
+ public void dispose() {}
+
+ public static class SaslPlainServerFactory implements SaslServerFactory {
+
+ public SaslServer createSaslServer(
+ String mechanism, String protocol, String serverName, Map<String, ?> props, CallbackHandler cbh)
+ {
+ if ("PLAIN".equals(mechanism)) {
+ try {
+ return new PlainSaslServer(cbh, protocol);
+ } catch (SaslException e) {
+ return null;
+ }
+ }
+ return null;
+ }
+
+ public String[] getMechanismNames(Map<String, ?> props) {
+ return new String[] { "PLAIN" };
+ }
+ }
+
+ public static class SaslPlainProvider extends java.security.Provider {
+ public SaslPlainProvider() {
+ super("HiveSaslPlain", 1.0, "Hive Plain SASL provider");
+ put("SaslServerFactory.PLAIN", SaslPlainServerFactory.class.getName());
+ }
+ }
+}
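For reference, the initial response that evaluateResponse() parses is the RFC 4616 message [authzid] UTF8NUL authcid UTF8NUL passwd; a client would construct it roughly as in this sketch, which uses an empty authzid.

package org.apache.hive.service.auth;

import java.io.UnsupportedEncodingException;

// Sketch: build the PLAIN initial response with an empty authzid.
public class PlainMessageExample {
  public static byte[] initialResponse(String user, String passwd)
      throws UnsupportedEncodingException {
    // Leading NUL = empty authzid, then authcid NUL passwd.
    return ("\0" + user + "\0" + passwd).getBytes("UTF-8");
  }
}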
diff --git service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
new file mode 100644
index 0000000..99134dd
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
@@ -0,0 +1,78 @@
+package org.apache.hive.service.auth;
+
+import java.net.Socket;
+
+import org.apache.hive.service.cli.session.SessionManager;
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCLIService.Iface;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSaslClientTransport;
+import org.apache.thrift.transport.TSaslServerTransport;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class is responsible for setting the ipAddress for operations executed via HiveServer2.
+ *
+ * <ul>
+ * <li>IP address is only set for operations that call listeners with a hookContext
+ * (see ExecuteWithHookContext).</li>
+ * <li>IP address is only set if the underlying transport mechanism is a socket.</li>
+ * </ul>
+ */
+public class TSetIpAddressProcessor extends TCLIService.Processor<Iface> {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(TSetIpAddressProcessor.class.getName());
+
+ public TSetIpAddressProcessor(Iface iface) {
+ super(iface);
+ }
+
+ @Override
+ public boolean process(final TProtocol in, final TProtocol out) throws TException {
+ setIpAddress(in);
+ setUserName(in);
+ return super.process(in, out);
+ }
+
+ private void setUserName(final TProtocol in) {
+ TTransport transport = in.getTransport();
+ if (transport instanceof TSaslServerTransport) {
+ String userName = ((TSaslServerTransport)transport).getSaslServer().getAuthorizationID();
+ SessionManager.setUserName(userName);
+ }
+ }
+
+ protected void setIpAddress(final TProtocol in) {
+ TTransport transport = in.getTransport();
+ TSocket tSocket = getUnderlyingSocketFromTransport(transport);
+ if (tSocket != null) {
+ setIpAddress(tSocket.getSocket());
+ } else {
+ LOGGER.warn("Unknown Transport, cannot determine ipAddress");
+ }
+ }
+
+ private void setIpAddress(Socket socket) {
+ SessionManager.setIpAddress(socket.getInetAddress().toString());
+ }
+
+ private TSocket getUnderlyingSocketFromTransport(TTransport transport) {
+ while (transport != null) {
+ if (transport instanceof TSaslServerTransport) {
+ transport = ((TSaslServerTransport) transport).getUnderlyingTransport();
+ } else if (transport instanceof TSaslClientTransport) {
+ transport = ((TSaslClientTransport) transport).getUnderlyingTransport();
+ } else if (transport instanceof TSocket) {
+ return (TSocket) transport;
+ } else {
+ return null; // unknown transport: no underlying socket to report
+ }
+ }
+ return null;
+ }
+}
\ No newline at end of file
diff --git service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java
new file mode 100644
index 0000000..12250ec
--- /dev/null
+++ service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java
@@ -0,0 +1,65 @@
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.thrift.TException;
+import org.apache.thrift.TProcessor;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSaslServerTransport;
+
+public class TUGIContainingProcessor implements TProcessor {
+
+ private final TProcessor wrapped;
+ private final HadoopShims shim;
+ private final boolean isFsCacheDisabled;
+
+ public TUGIContainingProcessor(TProcessor wrapped, Configuration conf) {
+ this.wrapped = wrapped;
+ this.isFsCacheDisabled = conf.getBoolean(String.format("fs.%s.impl.disable.cache",
+ FileSystem.getDefaultUri(conf).getScheme()), false);
+ this.shim = ShimLoader.getHadoopShims();
+ }
+
+ @Override
+ public boolean process(final TProtocol in, final TProtocol out) throws TException {
+ UserGroupInformation clientUgi = null;
+
+ try {
+ clientUgi = shim.createRemoteUser(((TSaslServerTransport)in.getTransport()).
+ getSaslServer().getAuthorizationID(), new ArrayList<String>());
+ return shim.doAs(clientUgi, new PrivilegedExceptionAction<Boolean>() {
+ public Boolean run() {
+ try {
+ return wrapped.process(in, out);
+ } catch (TException te) {
+ throw new RuntimeException(te);
+ }
+ }
+ });
+ }
+ catch (RuntimeException rte) {
+ if (rte.getCause() instanceof TException) {
+ throw (TException)rte.getCause();
+ }
+ throw rte;
+ } catch (InterruptedException ie) {
+ throw new RuntimeException(ie); // unexpected!
+ } catch (IOException ioe) {
+ throw new RuntimeException(ioe); // unexpected!
+ }
+ finally {
+ // cleanup the filesystem handles at the end if they are cached
+ // clientUgi will be null if createRemoteUser() fails
+ if (clientUgi != null && !isFsCacheDisabled) {
+ shim.closeAllForUGI(clientUgi);
+ }
+ }
+ }
+}
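The doAs pattern used above, shown without the shim indirection, looks like the following sketch; the wrapper class and user name are placeholders.

package org.apache.hive.service.auth;

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

// Sketch: execute work as the authenticated end user rather than as the
// server principal, which is what TUGIContainingProcessor does per request.
public class DoAsExample {
  public static String whoAmI(String endUser) throws Exception {
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(endUser);
    return ugi.doAs(new PrivilegedExceptionAction<String>() {
      public String run() throws Exception {
        return UserGroupInformation.getCurrentUser().getUserName();
      }
    });
  }
}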
diff --git service/src/java/org/apache/hive/service/cli/CLIService.java service/src/java/org/apache/hive/service/cli/CLIService.java
new file mode 100644
index 0000000..b53599b
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/CLIService.java
@@ -0,0 +1,328 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.service.CompositeService;
+import org.apache.hive.service.ServiceException;
+import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.cli.session.SessionManager;
+
+/**
+ * CLIService.
+ *
+ */
+public class CLIService extends CompositeService implements ICLIService {
+
+ private final Log LOG = LogFactory.getLog(CLIService.class.getName());
+
+ private HiveConf hiveConf;
+ private SessionManager sessionManager;
+ private IMetaStoreClient metastoreClient;
+ private String serverUserName = null;
+
+
+ public CLIService() {
+ super("CLIService");
+ }
+
+ @Override
+ public synchronized void init(HiveConf hiveConf) {
+ this.hiveConf = hiveConf;
+
+ sessionManager = new SessionManager();
+ addService(sessionManager);
+ try {
+ HiveAuthFactory.loginFromKeytab(hiveConf);
+ serverUserName = ShimLoader.getHadoopShims().
+ getShortUserName(ShimLoader.getHadoopShims().getUGIForConf(hiveConf));
+ } catch (IOException e) {
+ throw new ServiceException("Unable to login to kerberos with given principal/keytab", e);
+ } catch (LoginException e) {
+ throw new ServiceException("Unable to login to kerberos with given principal/keytab", e);
+ }
+ super.init(hiveConf);
+ }
+
+ @Override
+ public synchronized void start() {
+ super.start();
+
+ // Initialize and test a connection to the metastore
+ try {
+ metastoreClient = new HiveMetaStoreClient(hiveConf);
+ metastoreClient.getDatabases("default");
+ } catch (Exception e) {
+ throw new ServiceException("Unable to connect to MetaStore!", e);
+ }
+ }
+
+ @Override
+ public synchronized void stop() {
+ if (metastoreClient != null) {
+ metastoreClient.close();
+ }
+ super.stop();
+ }
+
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map)
+ */
+ @Override
+ public SessionHandle openSession(String username, String password, Map<String, String> configuration)
+ throws HiveSQLException {
+ SessionHandle sessionHandle = sessionManager.openSession(username, password, configuration, false, null);
+ LOG.info(sessionHandle + ": openSession()");
+ return sessionHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map)
+ */
+ @Override
+ public SessionHandle openSessionWithImpersonation(String username, String password, Map<String, String> configuration,
+ String delegationToken) throws HiveSQLException {
+ SessionHandle sessionHandle = sessionManager.openSession(username, password, configuration,
+ true, delegationToken);
+ LOG.info(sessionHandle + ": openSessionWithImpersonation()");
+ return sessionHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public void closeSession(SessionHandle sessionHandle)
+ throws HiveSQLException {
+ sessionManager.closeSession(sessionHandle);
+ LOG.info(sessionHandle + ": closeSession()");
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List)
+ */
+ @Override
+ public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType getInfoType)
+ throws HiveSQLException {
+ GetInfoValue infoValue = sessionManager.getSession(sessionHandle).getInfo(getInfoType);
+ LOG.info(sessionHandle + ": getInfo()");
+ return infoValue;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#executeStatement(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.util.Map)
+ */
+ @Override
+ public OperationHandle executeStatement(SessionHandle sessionHandle, String statement, Map<String, String> confOverlay)
+ throws HiveSQLException {
+ OperationHandle opHandle = sessionManager.getSession(sessionHandle)
+ .executeStatement(statement, confOverlay);
+ LOG.info(sessionHandle + ": executeStatement()");
+ return opHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getTypeInfo(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public OperationHandle getTypeInfo(SessionHandle sessionHandle)
+ throws HiveSQLException {
+ OperationHandle opHandle = sessionManager.getSession(sessionHandle).getTypeInfo();
+ LOG.info(sessionHandle + ": getTypeInfo()");
+ return opHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getCatalogs(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public OperationHandle getCatalogs(SessionHandle sessionHandle)
+ throws HiveSQLException {
+ OperationHandle opHandle = sessionManager.getSession(sessionHandle).getCatalogs();
+ LOG.info(sessionHandle + ": getCatalogs()");
+ return opHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String)
+ */
+ @Override
+ public OperationHandle getSchemas(SessionHandle sessionHandle,
+ String catalogName, String schemaName)
+ throws HiveSQLException {
+ OperationHandle opHandle = sessionManager.getSession(sessionHandle)
+ .getSchemas(catalogName, schemaName);
+ LOG.info(sessionHandle + ": getSchemas()");
+ return opHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List)
+ */
+ @Override
+ public OperationHandle getTables(SessionHandle sessionHandle,
+ String catalogName, String schemaName, String tableName, List<String> tableTypes)
+ throws HiveSQLException {
+ OperationHandle opHandle = sessionManager
+ .getSession(sessionHandle).getTables(catalogName, schemaName, tableName, tableTypes);
+ LOG.info(sessionHandle + ": getTables()");
+ return opHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getTableTypes(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public OperationHandle getTableTypes(SessionHandle sessionHandle)
+ throws HiveSQLException {
+ OperationHandle opHandle = sessionManager.getSession(sessionHandle).getTableTypes();
+ LOG.info(sessionHandle + ": getTableTypes()");
+ return opHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getColumns(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public OperationHandle getColumns(SessionHandle sessionHandle,
+ String catalogName, String schemaName, String tableName, String columnName)
+ throws HiveSQLException {
+ OperationHandle opHandle = sessionManager.getSession(sessionHandle)
+ .getColumns(catalogName, schemaName, tableName, columnName);
+ LOG.info(sessionHandle + ": getColumns()");
+ return opHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getFunctions(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public OperationHandle getFunctions(SessionHandle sessionHandle,
+ String catalogName, String schemaName, String functionName)
+ throws HiveSQLException {
+ OperationHandle opHandle = sessionManager.getSession(sessionHandle)
+ .getFunctions(catalogName, schemaName, functionName);
+ LOG.info(sessionHandle + ": getFunctions()");
+ return opHandle;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public OperationState getOperationStatus(OperationHandle opHandle)
+ throws HiveSQLException {
+ OperationState opState = sessionManager.getOperationManager().getOperationState(opHandle);
+ LOG.info(opHandle + ": getOperationStatus()");
+ return opState;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#cancelOperation(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public void cancelOperation(OperationHandle opHandle)
+ throws HiveSQLException {
+ sessionManager.getOperationManager().getOperation(opHandle).
+ getParentSession().cancelOperation(opHandle);
+ LOG.info(opHandle + ": cancelOperation()");
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#closeOperation(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public void closeOperation(OperationHandle opHandle)
+ throws HiveSQLException {
+ sessionManager.getOperationManager().getOperation(opHandle).
+ getParentSession().closeOperation(opHandle);
+ LOG.info(opHandle + ": closeOperation");
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public TableSchema getResultSetMetadata(OperationHandle opHandle)
+ throws HiveSQLException {
+ TableSchema tableSchema = sessionManager.getOperationManager().getOperation(opHandle).
+ getParentSession().getResultSetMetadata(opHandle);
+ LOG.info(opHandle + ": getResultSetMetadata()");
+ return tableSchema;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
+ throws HiveSQLException {
+ RowSet rowSet = sessionManager.getOperationManager().getOperation(opHandle).
+ getParentSession().fetchResults(opHandle, orientation, maxRows);
+ LOG.info(opHandle + ": fetchResults()");
+ return rowSet;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public RowSet fetchResults(OperationHandle opHandle)
+ throws HiveSQLException {
+ RowSet rowSet = sessionManager.getOperationManager().getOperation(opHandle).
+ getParentSession().fetchResults(opHandle);
+ LOG.info(opHandle + ": fetchResults()");
+ return rowSet;
+ }
+
+ // obtain delegation token for the given user from metastore
+ public synchronized String getDelegationTokenFromMetaStore(String owner)
+ throws HiveSQLException, UnsupportedOperationException, LoginException, IOException {
+ if (!hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL) ||
+ !hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_IMPERSONATION)) {
+ throw new UnsupportedOperationException(
+ "delegation token is can only be obtained for a secure remote metastore");
+ }
+
+ try {
+ Hive.closeCurrent();
+ return Hive.get(hiveConf).getDelegationToken(owner, owner);
+ } catch (HiveException e) {
+ if (e.getCause() instanceof UnsupportedOperationException) {
+ throw (UnsupportedOperationException)e.getCause();
+ } else {
+ throw new HiveSQLException("Error connect metastore to setup impersonation", e);
+ }
+ }
+ }
+}
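Taken together, a caller drives CLIService through the usual session/operation sequence. The sketch below shows the minimal happy path; the wrapper class, credentials, and statement are placeholders.

package org.apache.hive.service.cli;

import java.util.HashMap;

import org.apache.hadoop.hive.conf.HiveConf;

// Sketch: open a session, run one statement, fetch its results, clean up.
public class CLIServiceExample {
  public static void run() throws HiveSQLException {
    CLIService cli = new CLIService();
    cli.init(new HiveConf());
    cli.start();
    SessionHandle session = cli.openSession("hive", "", new HashMap<String, String>());
    try {
      OperationHandle op = cli.executeStatement(session, "SHOW TABLES",
          new HashMap<String, String>());
      RowSet rows = cli.fetchResults(op); // default orientation and batch size
      cli.closeOperation(op);
    } finally {
      cli.closeSession(session);
      cli.stop();
    }
  }
}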
diff --git service/src/java/org/apache/hive/service/cli/CLIServiceClient.java service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
new file mode 100644
index 0000000..fe49025
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * CLIServiceClient.
+ *
+ */
+public abstract class CLIServiceClient implements ICLIService {
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map)
+ */
+ @Override
+ public abstract SessionHandle openSession(String username, String password,
+ Map<String, String> configuration) throws HiveSQLException;
+
+
+ public SessionHandle openSession(String username, String password)
+ throws HiveSQLException {
+ return openSession(username, password, Collections.<String, String>emptyMap());
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public abstract void closeSession(SessionHandle sessionHandle) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List)
+ */
+ @Override
+ public abstract GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType getInfoType)
+ throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#executeStatement(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.util.Map)
+ */
+ @Override
+ public abstract OperationHandle executeStatement(SessionHandle sessionHandle, String statement,
+ Map confOverlay) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getTypeInfo(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public abstract OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getCatalogs(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public abstract OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String)
+ */
+ @Override
+ public abstract OperationHandle getSchemas(SessionHandle sessionHandle, String catalogName,
+ String schemaName) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List)
+ */
+ @Override
+ public abstract OperationHandle getTables(SessionHandle sessionHandle, String catalogName,
+ String schemaName, String tableName, List tableTypes) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getTableTypes(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public abstract OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getColumns(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
+ */
+ @Override
+ public abstract OperationHandle getColumns(SessionHandle sessionHandle, String catalogName,
+ String schemaName, String tableName, String columnName) throws HiveSQLException;
+
+ /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getFunctions(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String)
+ */
+ @Override
+ public abstract OperationHandle getFunctions(SessionHandle sessionHandle,
+ String catalogName, String schemaName, String functionName)
+ throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public abstract OperationState getOperationStatus(OperationHandle opHandle) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#cancelOperation(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public abstract void cancelOperation(OperationHandle opHandle) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#closeOperation(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public abstract void closeOperation(OperationHandle opHandle) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public abstract TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public abstract RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
+ throws HiveSQLException;
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
+    // TODO: hoist the default maxRows value (1000) into a named static constant
+ return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 1000);
+ }
+
+}
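
The abstract client above mainly contributes two convenience defaults: openSession with an empty configuration map, and fetchResults with FETCH_NEXT and a 1000-row cap. A hypothetical caller (class and method names illustrative, not part of the patch):

    import java.util.HashMap;

    import org.apache.hive.service.cli.CLIServiceClient;
    import org.apache.hive.service.cli.HiveSQLException;
    import org.apache.hive.service.cli.OperationHandle;
    import org.apache.hive.service.cli.RowSet;
    import org.apache.hive.service.cli.SessionHandle;

    public class ClientDefaultsSketch {
      static RowSet selectOne(CLIServiceClient client) throws HiveSQLException {
        SessionHandle session = client.openSession("hive", "");  // empty config map
        OperationHandle op = client.executeStatement(session, "SELECT 1", new HashMap());
        RowSet rows = client.fetchResults(op);                   // FETCH_NEXT, up to 1000 rows
        client.closeOperation(op);
        client.closeSession(session);
        return rows;
      }
    }
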
diff --git service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
new file mode 100644
index 0000000..876ade8
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+/**
+ * CLIServiceUtils.
+ *
+ */
+public class CLIServiceUtils {
+
+
+ private static final char SEARCH_STRING_ESCAPE = '\\';
+
+ /**
+ * Convert a SQL search pattern into an equivalent Java Regex.
+ *
+ * @param pattern input which may contain '%' or '_' wildcard characters, or
+   * these characters escaped with the backslash escape character
+   * ({@code SEARCH_STRING_ESCAPE}).
+   * @return the equivalent Java regex: '%' becomes ".*", '_' becomes '.',
+   * escaped wildcards are emitted as literals, and all other characters are
+   * lowercased for case-insensitive matching.
+ */
+ public static String patternToRegex(String pattern) {
+ if (pattern == null) {
+ return ".*";
+ } else {
+ StringBuilder result = new StringBuilder(pattern.length());
+
+ boolean escaped = false;
+ for (int i = 0, len = pattern.length(); i < len; i++) {
+ char c = pattern.charAt(i);
+ if (escaped) {
+ if (c != SEARCH_STRING_ESCAPE) {
+ escaped = false;
+ }
+ result.append(c);
+ } else {
+ if (c == SEARCH_STRING_ESCAPE) {
+ escaped = true;
+ continue;
+ } else if (c == '%') {
+ result.append(".*");
+ } else if (c == '_') {
+ result.append('.');
+ } else {
+ result.append(Character.toLowerCase(c));
+ }
+ }
+ }
+ return result.toString();
+ }
+ }
+
+}
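
A quick demonstration of the wildcard translation above; this snippet is illustrative only, not part of the patch, and assumes nothing beyond the class just added. The expected outputs in the comments follow directly from the conversion loop.

    import org.apache.hive.service.cli.CLIServiceUtils;

    public class PatternToRegexDemo {
      public static void main(String[] args) {
        // '%' -> ".*", '_' -> '.', a backslash escapes the following wildcard,
        // and all other characters are lowercased.
        System.out.println(CLIServiceUtils.patternToRegex("My\\_Table%"));  // my_table.*
        System.out.println(CLIServiceUtils.patternToRegex("db_"));          // db.
        System.out.println(CLIServiceUtils.patternToRegex(null));           // .*
      }
    }
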
diff --git service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
new file mode 100644
index 0000000..d702723
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hive.service.cli.thrift.TColumnDesc;
+
+
+/**
+ * ColumnDescriptor.
+ *
+ */
+public class ColumnDescriptor {
+ private final String name;
+ private final String comment;
+ private final TypeDescriptor type;
+ // ordinal position of this column in the schema
+ private final int position;
+
+ public ColumnDescriptor(String name, String comment, TypeDescriptor type, int position) {
+ this.name = name;
+ this.comment = comment;
+ this.type = type;
+ this.position = position;
+ }
+
+ public ColumnDescriptor(TColumnDesc tColumnDesc) {
+ name = tColumnDesc.getColumnName();
+ comment = tColumnDesc.getComment();
+ type = new TypeDescriptor(tColumnDesc.getTypeDesc());
+ position = tColumnDesc.getPosition();
+ }
+
+ public ColumnDescriptor(FieldSchema column, int position) {
+ name = column.getName();
+ comment = column.getComment();
+ type = new TypeDescriptor(column.getType());
+ this.position = position;
+ }
+
+ public static ColumnDescriptor newPrimitiveColumnDescriptor(String name, String comment, Type type, int position) {
+ return new ColumnDescriptor(name, comment, new TypeDescriptor(type), position);
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public TypeDescriptor getTypeDescriptor() {
+ return type;
+ }
+
+ public int getOrdinalPosition() {
+ return position;
+ }
+
+ public TColumnDesc toTColumnDesc() {
+ TColumnDesc tColumnDesc = new TColumnDesc();
+ tColumnDesc.setColumnName(name);
+ tColumnDesc.setComment(comment);
+ tColumnDesc.setTypeDesc(type.toTTypeDesc());
+ tColumnDesc.setPosition(position);
+ return tColumnDesc;
+ }
+
+ public Type getType() {
+ return type.getType();
+ }
+
+ public boolean isPrimitive() {
+ return type.getType().isPrimitiveType();
+ }
+
+ public String getTypeName() {
+ return type.getTypeName();
+ }
+}
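
Column positions are 1-based, JDBC-style; Row, added later in this patch, indexes its fields with getOrdinalPosition() - 1. A brief sketch (class name illustrative; assumes the Type enum added elsewhere in this patch):

    import org.apache.hive.service.cli.ColumnDescriptor;
    import org.apache.hive.service.cli.Type;

    public class ColumnDescriptorSketch {
      public static void main(String[] args) {
        // Ordinal positions start at 1, matching JDBC conventions.
        ColumnDescriptor col = ColumnDescriptor.newPrimitiveColumnDescriptor(
            "tab_name", "table name", Type.STRING_TYPE, 1);
        System.out.println(col.getName() + " : " + col.getTypeName());
      }
    }
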
diff --git service/src/java/org/apache/hive/service/cli/ColumnValue.java service/src/java/org/apache/hive/service/cli/ColumnValue.java
new file mode 100644
index 0000000..c3327d3
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/ColumnValue.java
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.math.BigDecimal;
+import java.sql.Timestamp;
+
+import org.apache.hive.service.cli.thrift.TBoolValue;
+import org.apache.hive.service.cli.thrift.TByteValue;
+import org.apache.hive.service.cli.thrift.TColumnValue;
+import org.apache.hive.service.cli.thrift.TDoubleValue;
+import org.apache.hive.service.cli.thrift.TI16Value;
+import org.apache.hive.service.cli.thrift.TI32Value;
+import org.apache.hive.service.cli.thrift.TI64Value;
+import org.apache.hive.service.cli.thrift.TStringValue;
+
+/**
+ * ColumnValue.
+ *
+ */
+public class ColumnValue {
+
+ // TODO: replace this with a non-Thrift implementation
+ private final TColumnValue tColumnValue;
+
+ public ColumnValue(TColumnValue tColumnValue) {
+ this.tColumnValue = new TColumnValue(tColumnValue);
+ }
+
+ private static boolean isNull(Object value) {
+ return (value == null);
+ }
+
+ public static ColumnValue booleanValue(Boolean value) {
+ TBoolValue tBoolValue = new TBoolValue();
+ if (value != null) {
+ tBoolValue.setValue(value);
+ }
+ return new ColumnValue(TColumnValue.boolVal(tBoolValue));
+ }
+
+ public static ColumnValue byteValue(Byte value) {
+ TByteValue tByteValue = new TByteValue();
+ if (value != null) {
+ tByteValue.setValue(value);
+ }
+ return new ColumnValue(TColumnValue.byteVal(tByteValue));
+ }
+
+ public static ColumnValue shortValue(Short value) {
+ TI16Value tI16Value = new TI16Value();
+ if (value != null) {
+ tI16Value.setValue(value);
+ }
+ return new ColumnValue(TColumnValue.i16Val(tI16Value));
+ }
+
+ public static ColumnValue intValue(Integer value) {
+ TI32Value tI32Value = new TI32Value();
+ if (value != null) {
+ tI32Value.setValue(value);
+ }
+ return new ColumnValue(TColumnValue.i32Val(tI32Value));
+ }
+
+ public static ColumnValue longValue(Long value) {
+ TI64Value tI64Value = new TI64Value();
+ if (value != null) {
+ tI64Value.setValue(value);
+ }
+ return new ColumnValue(TColumnValue.i64Val(tI64Value));
+ }
+
+ public static ColumnValue floatValue(Float value) {
+ TDoubleValue tDoubleValue = new TDoubleValue();
+ if (value != null) {
+ tDoubleValue.setValue(value);
+ }
+ return new ColumnValue(TColumnValue.doubleVal(tDoubleValue));
+ }
+
+ public static ColumnValue doubleValue(Double value) {
+ TDoubleValue tDoubleValue = new TDoubleValue();
+ if (value != null) {
+ tDoubleValue.setValue(value);
+ }
+ return new ColumnValue(TColumnValue.doubleVal(tDoubleValue));
+ }
+
+ public static ColumnValue stringValue(String value) {
+ TStringValue tStringValue = new TStringValue();
+ if (value != null) {
+ tStringValue.setValue(value);
+ }
+ return new ColumnValue(TColumnValue.stringVal(tStringValue));
+ }
+
+ public static ColumnValue timestampValue(Timestamp value) {
+ TStringValue tStringValue = new TStringValue();
+ if (value != null) {
+ tStringValue.setValue(value.toString());
+ }
+ return new ColumnValue(TColumnValue.stringVal(tStringValue));
+ }
+
+ public static ColumnValue stringValue(BigDecimal value) {
+ TStringValue tStrValue = new TStringValue();
+ if (value != null) {
+      tStrValue.setValue(value.toString());
+ }
+ return new ColumnValue(TColumnValue.stringVal(tStrValue));
+ }
+
+ public static ColumnValue newColumnValue(Type type, Object value) {
+ switch (type) {
+ case BOOLEAN_TYPE:
+ return booleanValue((Boolean)value);
+ case TINYINT_TYPE:
+ return byteValue((Byte)value);
+ case SMALLINT_TYPE:
+ return shortValue((Short)value);
+ case INT_TYPE:
+ return intValue((Integer)value);
+ case BIGINT_TYPE:
+ return longValue((Long)value);
+ case FLOAT_TYPE:
+ return floatValue((Float)value);
+ case DOUBLE_TYPE:
+ return doubleValue((Double)value);
+ case STRING_TYPE:
+ return stringValue((String)value);
+ case TIMESTAMP_TYPE:
+ return timestampValue((Timestamp)value);
+ case DECIMAL_TYPE:
+        return stringValue((BigDecimal)value);
+ case BINARY_TYPE:
+ case ARRAY_TYPE:
+ case MAP_TYPE:
+ case STRUCT_TYPE:
+ case UNION_TYPE:
+ case USER_DEFINED_TYPE:
+ return stringValue((String)value);
+ default:
+ return null;
+ }
+ }
+
+ public TColumnValue toTColumnValue() {
+ return new TColumnValue(tColumnValue);
+ }
+
+}
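
A short sketch of the factory above (class name illustrative, not part of the patch): nulls are legal inputs, since each factory method simply leaves the Thrift value unset, and timestamps and decimals travel as their string form.

    import java.sql.Timestamp;

    import org.apache.hive.service.cli.ColumnValue;
    import org.apache.hive.service.cli.Type;

    public class ColumnValueSketch {
      public static void main(String[] args) {
        // A null value leaves the Thrift field unset rather than failing.
        ColumnValue present = ColumnValue.newColumnValue(Type.INT_TYPE, Integer.valueOf(42));
        ColumnValue absent = ColumnValue.newColumnValue(Type.INT_TYPE, null);
        // Timestamps are shipped as strings over the wire.
        ColumnValue ts = ColumnValue.newColumnValue(Type.TIMESTAMP_TYPE, new Timestamp(0L));
        System.out.println(ts.toTColumnValue());
      }
    }
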
diff --git service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
new file mode 100644
index 0000000..38d64c8
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * EmbeddedCLIServiceClient.
+ *
+ */
+public class EmbeddedCLIServiceClient extends CLIServiceClient {
+ private final ICLIService cliService;
+
+ public EmbeddedCLIServiceClient(ICLIService cliService) {
+ this.cliService = cliService;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#openSession(java.lang.String, java.lang.String, java.util.Map)
+ */
+ @Override
+ public SessionHandle openSession(String username, String password,
+ Map configuration) throws HiveSQLException {
+ return cliService.openSession(username, password, configuration);
+ }
+
+ @Override
+ public SessionHandle openSessionWithImpersonation(String username, String password,
+ Map configuration, String delegationToken) throws HiveSQLException {
+ throw new HiveSQLException("Impersonated session is not supported in the embedded mode");
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#closeSession(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public void closeSession(SessionHandle sessionHandle) throws HiveSQLException {
+ cliService.closeSession(sessionHandle);
+ }
+
+ /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.CLIServiceClient#getInfo(org.apache.hive.service.cli.SessionHandle, org.apache.hive.service.cli.GetInfoType)
+ */
+ @Override
+ public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType getInfoType)
+ throws HiveSQLException {
+ return cliService.getInfo(sessionHandle, getInfoType);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#executeStatement(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.util.Map)
+ */
+ @Override
+ public OperationHandle executeStatement(SessionHandle sessionHandle, String statement,
+ Map confOverlay) throws HiveSQLException {
+ return cliService.executeStatement(sessionHandle, statement, confOverlay);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#getTypeInfo(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException {
+ return cliService.getTypeInfo(sessionHandle);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#getCatalogs(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException {
+ return cliService.getCatalogs(sessionHandle);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String)
+ */
+ @Override
+ public OperationHandle getSchemas(SessionHandle sessionHandle, String catalogName,
+ String schemaName) throws HiveSQLException {
+ return cliService.getSchemas(sessionHandle, catalogName, schemaName);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List)
+ */
+ @Override
+ public OperationHandle getTables(SessionHandle sessionHandle, String catalogName,
+ String schemaName, String tableName, List tableTypes) throws HiveSQLException {
+ return cliService.getTables(sessionHandle, catalogName, schemaName, tableName, tableTypes);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#getTableTypes(org.apache.hive.service.cli.SessionHandle)
+ */
+ @Override
+ public OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException {
+ return cliService.getTableTypes(sessionHandle);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#getColumns(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
+ */
+ @Override
+ public OperationHandle getColumns(SessionHandle sessionHandle, String catalogName,
+ String schemaName, String tableName, String columnName) throws HiveSQLException {
+ return cliService.getColumns(sessionHandle, catalogName, schemaName, tableName, columnName);
+ }
+
+ /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.CLIServiceClient#getFunctions(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String)
+ */
+ @Override
+ public OperationHandle getFunctions(SessionHandle sessionHandle,
+ String catalogName, String schemaName, String functionName)
+ throws HiveSQLException {
+ return cliService.getFunctions(sessionHandle, catalogName, schemaName, functionName);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#getOperationStatus(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public OperationState getOperationStatus(OperationHandle opHandle) throws HiveSQLException {
+ return cliService.getOperationStatus(opHandle);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#cancelOperation(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public void cancelOperation(OperationHandle opHandle) throws HiveSQLException {
+ cliService.cancelOperation(opHandle);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#closeOperation(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public void closeOperation(OperationHandle opHandle) throws HiveSQLException {
+ cliService.closeOperation(opHandle);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle)
+ */
+ @Override
+ public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
+ return cliService.getResultSetMetadata(opHandle);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.CLIServiceClient#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
+ throws HiveSQLException {
+ return cliService.fetchResults(opHandle, orientation, maxRows);
+ }
+
+}
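
The embedded client is a thin pass-through; the one behavioral difference from a remote client is that impersonated sessions are rejected. A sketch of its intended use (class name illustrative; service construction elided):

    import org.apache.hive.service.cli.CLIServiceClient;
    import org.apache.hive.service.cli.EmbeddedCLIServiceClient;
    import org.apache.hive.service.cli.ICLIService;

    public class EmbeddedClientSketch {
      // Adapts an in-process ICLIService to the CLIServiceClient API, so caller
      // code is identical whether the service is embedded or remote.
      static CLIServiceClient wrap(ICLIService inProcessService) {
        return new EmbeddedCLIServiceClient(inProcessService);
      }
    }
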
diff --git service/src/java/org/apache/hive/service/cli/FetchOrientation.java service/src/java/org/apache/hive/service/cli/FetchOrientation.java
new file mode 100644
index 0000000..ffa6f2e
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/FetchOrientation.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import org.apache.hive.service.cli.thrift.TFetchOrientation;
+
+/**
+ * FetchOrientation.
+ *
+ */
+public enum FetchOrientation {
+ FETCH_NEXT(TFetchOrientation.FETCH_NEXT),
+ FETCH_PRIOR(TFetchOrientation.FETCH_PRIOR),
+ FETCH_RELATIVE(TFetchOrientation.FETCH_RELATIVE),
+ FETCH_ABSOLUTE(TFetchOrientation.FETCH_ABSOLUTE),
+ FETCH_FIRST(TFetchOrientation.FETCH_FIRST),
+ FETCH_LAST(TFetchOrientation.FETCH_LAST);
+
+ private TFetchOrientation tFetchOrientation;
+
+ FetchOrientation(TFetchOrientation tFetchOrientation) {
+ this.tFetchOrientation = tFetchOrientation;
+ }
+
+ public static FetchOrientation getFetchOrientation(TFetchOrientation tFetchOrientation) {
+ for (FetchOrientation fetchOrientation : values()) {
+ if (tFetchOrientation.equals(fetchOrientation.toTFetchOrientation())) {
+ return fetchOrientation;
+ }
+ }
+ // TODO: Should this really default to FETCH_NEXT?
+ return FETCH_NEXT;
+ }
+
+ public TFetchOrientation toTFetchOrientation() {
+ return tFetchOrientation;
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/GetInfoType.java service/src/java/org/apache/hive/service/cli/GetInfoType.java
new file mode 100644
index 0000000..8dd33a8
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/GetInfoType.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import org.apache.hive.service.cli.thrift.TGetInfoType;
+
+/**
+ * GetInfoType.
+ *
+ */
+public enum GetInfoType {
+ CLI_MAX_DRIVER_CONNECTIONS(TGetInfoType.CLI_MAX_DRIVER_CONNECTIONS),
+ CLI_MAX_CONCURRENT_ACTIVITIES(TGetInfoType.CLI_MAX_CONCURRENT_ACTIVITIES),
+ CLI_DATA_SOURCE_NAME(TGetInfoType.CLI_DATA_SOURCE_NAME),
+ CLI_FETCH_DIRECTION(TGetInfoType.CLI_FETCH_DIRECTION),
+ CLI_SERVER_NAME(TGetInfoType.CLI_SERVER_NAME),
+ CLI_SEARCH_PATTERN_ESCAPE(TGetInfoType.CLI_SEARCH_PATTERN_ESCAPE),
+ CLI_DBMS_NAME(TGetInfoType.CLI_DBMS_NAME),
+ CLI_DBMS_VER(TGetInfoType.CLI_DBMS_VER),
+ CLI_ACCESSIBLE_TABLES(TGetInfoType.CLI_ACCESSIBLE_TABLES),
+ CLI_ACCESSIBLE_PROCEDURES(TGetInfoType.CLI_ACCESSIBLE_PROCEDURES),
+ CLI_CURSOR_COMMIT_BEHAVIOR(TGetInfoType.CLI_CURSOR_COMMIT_BEHAVIOR),
+ CLI_DATA_SOURCE_READ_ONLY(TGetInfoType.CLI_DATA_SOURCE_READ_ONLY),
+ CLI_DEFAULT_TXN_ISOLATION(TGetInfoType.CLI_DEFAULT_TXN_ISOLATION),
+ CLI_IDENTIFIER_CASE(TGetInfoType.CLI_IDENTIFIER_CASE),
+ CLI_IDENTIFIER_QUOTE_CHAR(TGetInfoType.CLI_IDENTIFIER_QUOTE_CHAR),
+ CLI_MAX_COLUMN_NAME_LEN(TGetInfoType.CLI_MAX_COLUMN_NAME_LEN),
+ CLI_MAX_CURSOR_NAME_LEN(TGetInfoType.CLI_MAX_CURSOR_NAME_LEN),
+ CLI_MAX_SCHEMA_NAME_LEN(TGetInfoType.CLI_MAX_SCHEMA_NAME_LEN),
+ CLI_MAX_CATALOG_NAME_LEN(TGetInfoType.CLI_MAX_CATALOG_NAME_LEN),
+ CLI_MAX_TABLE_NAME_LEN(TGetInfoType.CLI_MAX_TABLE_NAME_LEN),
+ CLI_SCROLL_CONCURRENCY(TGetInfoType.CLI_SCROLL_CONCURRENCY),
+ CLI_TXN_CAPABLE(TGetInfoType.CLI_TXN_CAPABLE),
+ CLI_USER_NAME(TGetInfoType.CLI_USER_NAME),
+ CLI_TXN_ISOLATION_OPTION(TGetInfoType.CLI_TXN_ISOLATION_OPTION),
+ CLI_INTEGRITY(TGetInfoType.CLI_INTEGRITY),
+ CLI_GETDATA_EXTENSIONS(TGetInfoType.CLI_GETDATA_EXTENSIONS),
+ CLI_NULL_COLLATION(TGetInfoType.CLI_NULL_COLLATION),
+ CLI_ALTER_TABLE(TGetInfoType.CLI_ALTER_TABLE),
+ CLI_ORDER_BY_COLUMNS_IN_SELECT(TGetInfoType.CLI_ORDER_BY_COLUMNS_IN_SELECT),
+ CLI_SPECIAL_CHARACTERS(TGetInfoType.CLI_SPECIAL_CHARACTERS),
+ CLI_MAX_COLUMNS_IN_GROUP_BY(TGetInfoType.CLI_MAX_COLUMNS_IN_GROUP_BY),
+ CLI_MAX_COLUMNS_IN_INDEX(TGetInfoType.CLI_MAX_COLUMNS_IN_INDEX),
+ CLI_MAX_COLUMNS_IN_ORDER_BY(TGetInfoType.CLI_MAX_COLUMNS_IN_ORDER_BY),
+ CLI_MAX_COLUMNS_IN_SELECT(TGetInfoType.CLI_MAX_COLUMNS_IN_SELECT),
+ CLI_MAX_COLUMNS_IN_TABLE(TGetInfoType.CLI_MAX_COLUMNS_IN_TABLE),
+ CLI_MAX_INDEX_SIZE(TGetInfoType.CLI_MAX_INDEX_SIZE),
+ CLI_MAX_ROW_SIZE(TGetInfoType.CLI_MAX_ROW_SIZE),
+ CLI_MAX_STATEMENT_LEN(TGetInfoType.CLI_MAX_STATEMENT_LEN),
+ CLI_MAX_TABLES_IN_SELECT(TGetInfoType.CLI_MAX_TABLES_IN_SELECT),
+ CLI_MAX_USER_NAME_LEN(TGetInfoType.CLI_MAX_USER_NAME_LEN),
+ CLI_OJ_CAPABILITIES(TGetInfoType.CLI_OJ_CAPABILITIES),
+
+ CLI_XOPEN_CLI_YEAR(TGetInfoType.CLI_XOPEN_CLI_YEAR),
+ CLI_CURSOR_SENSITIVITY(TGetInfoType.CLI_CURSOR_SENSITIVITY),
+ CLI_DESCRIBE_PARAMETER(TGetInfoType.CLI_DESCRIBE_PARAMETER),
+ CLI_CATALOG_NAME(TGetInfoType.CLI_CATALOG_NAME),
+ CLI_COLLATION_SEQ(TGetInfoType.CLI_COLLATION_SEQ),
+ CLI_MAX_IDENTIFIER_LEN(TGetInfoType.CLI_MAX_IDENTIFIER_LEN);
+
+ private final TGetInfoType tInfoType;
+
+ GetInfoType(TGetInfoType tInfoType) {
+ this.tInfoType = tInfoType;
+ }
+
+ public static GetInfoType getGetInfoType(TGetInfoType tGetInfoType) {
+ for (GetInfoType infoType : values()) {
+ if (tGetInfoType.equals(infoType.tInfoType)) {
+ return infoType;
+ }
+ }
+ throw new IllegalArgumentException("Unrecognized Thrift TGetInfoType value: " + tGetInfoType);
+ }
+
+ public TGetInfoType toTGetInfoType() {
+ return tInfoType;
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/cli/GetInfoValue.java service/src/java/org/apache/hive/service/cli/GetInfoValue.java
new file mode 100644
index 0000000..ba92ff4
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/GetInfoValue.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import org.apache.hive.service.cli.thrift.TGetInfoValue;
+
+/**
+ * GetInfoValue.
+ *
+ */
+public class GetInfoValue {
+ private String stringValue = null;
+ private short shortValue;
+ private int intValue;
+ private long longValue;
+
+ public GetInfoValue(String stringValue) {
+ this.stringValue = stringValue;
+ }
+
+ public GetInfoValue(short shortValue) {
+ this.shortValue = shortValue;
+ }
+
+ public GetInfoValue(int intValue) {
+ this.intValue = intValue;
+ }
+
+ public GetInfoValue(long longValue) {
+ this.longValue = longValue;
+ }
+
+ public GetInfoValue(TGetInfoValue tGetInfoValue) {
+ switch (tGetInfoValue.getSetField()) {
+ case STRING_VALUE:
+ stringValue = tGetInfoValue.getStringValue();
+ break;
+ default:
+ throw new IllegalArgumentException("Unreconigzed TGetInfoValue");
+ }
+ }
+
+ public TGetInfoValue toTGetInfoValue() {
+ TGetInfoValue tInfoValue = new TGetInfoValue();
+ if (stringValue != null) {
+ tInfoValue.setStringValue(stringValue);
+ }
+ return tInfoValue;
+ }
+
+ public String getStringValue() {
+ return stringValue;
+ }
+
+ public short getShortValue() {
+ return shortValue;
+ }
+
+ public int getIntValue() {
+ return intValue;
+ }
+
+ public long getLongValue() {
+ return longValue;
+ }
+}
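
Note that toTGetInfoValue() currently serializes only the string variant; the numeric constructors store their value locally but do not populate the Thrift value. A sketch of the asymmetry (class name illustrative, not part of the patch):

    import org.apache.hive.service.cli.GetInfoValue;

    public class GetInfoValueSketch {
      public static void main(String[] args) {
        GetInfoValue dbms = new GetInfoValue("Apache Hive");
        System.out.println(dbms.toTGetInfoValue());  // stringValue is set

        GetInfoValue maxLen = new GetInfoValue(128);
        System.out.println(maxLen.getIntValue());    // 128 locally, but not serialized
      }
    }
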
diff --git service/src/java/org/apache/hive/service/cli/Handle.java service/src/java/org/apache/hive/service/cli/Handle.java
new file mode 100644
index 0000000..cf3427a
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/Handle.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.cli;
+
+import org.apache.hive.service.cli.thrift.THandleIdentifier;
+
+public abstract class Handle {
+
+ private final HandleIdentifier handleId;
+
+ public Handle() {
+ handleId = new HandleIdentifier();
+ }
+
+ public Handle(HandleIdentifier handleId) {
+ this.handleId = handleId;
+ }
+
+ public Handle(THandleIdentifier tHandleIdentifier) {
+ this.handleId = new HandleIdentifier(tHandleIdentifier);
+ }
+
+ public HandleIdentifier getHandleIdentifier() {
+ return handleId;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((handleId == null) ? 0 : handleId.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (!(obj instanceof Handle)) {
+ return false;
+ }
+ Handle other = (Handle) obj;
+ if (handleId == null) {
+ if (other.handleId != null) {
+ return false;
+ }
+ } else if (!handleId.equals(other.handleId)) {
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ public abstract String toString();
+
+}
diff --git service/src/java/org/apache/hive/service/cli/HandleIdentifier.java service/src/java/org/apache/hive/service/cli/HandleIdentifier.java
new file mode 100644
index 0000000..4dc80da
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/HandleIdentifier.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.nio.ByteBuffer;
+import java.util.UUID;
+
+import org.apache.hive.service.cli.thrift.THandleIdentifier;
+
+/**
+ * HandleIdentifier.
+ *
+ */
+public class HandleIdentifier {
+ private final UUID publicId;
+ private final UUID secretId;
+
+ public HandleIdentifier() {
+ publicId = UUID.randomUUID();
+ secretId = UUID.randomUUID();
+ }
+
+ public HandleIdentifier(UUID publicId, UUID secretId) {
+ this.publicId = publicId;
+ this.secretId = secretId;
+ }
+
+ public HandleIdentifier(THandleIdentifier tHandleId) {
+ ByteBuffer bb = ByteBuffer.wrap(tHandleId.getGuid());
+ this.publicId = new UUID(bb.getLong(), bb.getLong());
+ bb = ByteBuffer.wrap(tHandleId.getSecret());
+ this.secretId = new UUID(bb.getLong(), bb.getLong());
+ }
+
+ public UUID getPublicId() {
+ return publicId;
+ }
+
+ public UUID getSecretId() {
+ return secretId;
+ }
+
+ public THandleIdentifier toTHandleIdentifier() {
+ byte[] guid = new byte[16];
+ byte[] secret = new byte[16];
+ ByteBuffer guidBB = ByteBuffer.wrap(guid);
+ ByteBuffer secretBB = ByteBuffer.wrap(secret);
+ guidBB.putLong(publicId.getMostSignificantBits());
+ guidBB.putLong(publicId.getLeastSignificantBits());
+ secretBB.putLong(secretId.getMostSignificantBits());
+ secretBB.putLong(secretId.getLeastSignificantBits());
+ return new THandleIdentifier(ByteBuffer.wrap(guid), ByteBuffer.wrap(secret));
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((publicId == null) ? 0 : publicId.hashCode());
+ result = prime * result + ((secretId == null) ? 0 : secretId.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (!(obj instanceof HandleIdentifier)) {
+ return false;
+ }
+ HandleIdentifier other = (HandleIdentifier) obj;
+ if (publicId == null) {
+ if (other.publicId != null) {
+ return false;
+ }
+ } else if (!publicId.equals(other.publicId)) {
+ return false;
+ }
+ if (secretId == null) {
+ if (other.secretId != null) {
+ return false;
+ }
+ } else if (!secretId.equals(other.secretId)) {
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return publicId.toString();
+ }
+}
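
The identifier round-trips through Thrift by packing each UUID into a 16-byte buffer; a quick sketch of the encode/decode symmetry (class name illustrative, not part of the patch):

    import org.apache.hive.service.cli.HandleIdentifier;
    import org.apache.hive.service.cli.thrift.THandleIdentifier;

    public class HandleIdentifierSketch {
      public static void main(String[] args) {
        HandleIdentifier original = new HandleIdentifier();  // fresh public/secret UUIDs
        THandleIdentifier wire = original.toTHandleIdentifier();
        HandleIdentifier decoded = new HandleIdentifier(wire);
        System.out.println(original.equals(decoded));        // true: both UUIDs survive
      }
    }
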
diff --git service/src/java/org/apache/hive/service/cli/HiveSQLException.java service/src/java/org/apache/hive/service/cli/HiveSQLException.java
new file mode 100644
index 0000000..74e8b94
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/HiveSQLException.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.sql.SQLException;
+
+import org.apache.hive.service.cli.thrift.TStatus;
+import org.apache.hive.service.cli.thrift.TStatusCode;
+
+/**
+ * HiveSQLException.
+ *
+ */
+public class HiveSQLException extends SQLException {
+
+ /**
+ *
+ */
+ private static final long serialVersionUID = -6095254671958748094L;
+
+ /**
+ *
+ */
+ public HiveSQLException() {
+ super();
+ }
+
+ /**
+ * @param reason
+ */
+ public HiveSQLException(String reason) {
+ super(reason);
+ }
+
+ /**
+ * @param cause
+ */
+ public HiveSQLException(Throwable cause) {
+ super(cause);
+ }
+
+ /**
+ * @param reason
+ * @param sqlState
+ */
+ public HiveSQLException(String reason, String sqlState) {
+ super(reason, sqlState);
+ }
+
+ /**
+ * @param reason
+ * @param cause
+ */
+ public HiveSQLException(String reason, Throwable cause) {
+ super(reason, cause);
+ }
+
+ /**
+ * @param reason
+ * @param sqlState
+ * @param vendorCode
+ */
+ public HiveSQLException(String reason, String sqlState, int vendorCode) {
+ super(reason, sqlState, vendorCode);
+ }
+
+ /**
+ * @param reason
+ * @param sqlState
+ * @param cause
+ */
+ public HiveSQLException(String reason, String sqlState, Throwable cause) {
+ super(reason, sqlState, cause);
+ }
+
+ /**
+ * @param reason
+ * @param sqlState
+ * @param vendorCode
+ * @param cause
+ */
+ public HiveSQLException(String reason, String sqlState, int vendorCode, Throwable cause) {
+ super(reason, sqlState, vendorCode, cause);
+ }
+
+ public HiveSQLException(TStatus status) {
+ // TODO: set correct vendorCode field
+ super(status.getErrorMessage(), status.getSqlState(), 1);
+ }
+
+ public TStatus toTStatus() {
+ // TODO: convert sqlState, etc.
+ TStatus tStatus = new TStatus(TStatusCode.ERROR_STATUS);
+ tStatus.setSqlState(getSQLState());
+ tStatus.setErrorCode(getErrorCode());
+ tStatus.setErrorMessage(getMessage());
+ return tStatus;
+ }
+
+ public static TStatus toTStatus(Exception e) {
+ if (e instanceof HiveSQLException) {
+ return ((HiveSQLException)e).toTStatus();
+ }
+    TStatus tStatus = new TStatus(TStatusCode.ERROR_STATUS);
+    tStatus.setErrorMessage(e.getMessage());
+    return tStatus;
+ }
+
+}
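
The static toTStatus(Exception) is the server-side catch-all: Thrift handlers convert any failure into a TStatus on the response rather than letting the exception escape. A hypothetical handler fragment (class and method names illustrative):

    import org.apache.hive.service.cli.HiveSQLException;
    import org.apache.hive.service.cli.thrift.TStatus;

    public class StatusMappingSketch {
      // HiveSQLExceptions keep their SQL state and error code; anything else
      // degrades to a bare ERROR_STATUS carrying the exception message.
      static TStatus report(Exception e) {
        return HiveSQLException.toTStatus(e);
      }
    }
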
diff --git service/src/java/org/apache/hive/service/cli/ICLIService.java service/src/java/org/apache/hive/service/cli/ICLIService.java
new file mode 100644
index 0000000..7e863b5
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/ICLIService.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.cli;
+
+import java.util.List;
+import java.util.Map;
+
+public interface ICLIService {
+
+ public abstract SessionHandle openSession(String username, String password,
+ Map configuration)
+ throws HiveSQLException;
+
+ public abstract SessionHandle openSessionWithImpersonation(String username, String password,
+ Map configuration, String delegationToken)
+ throws HiveSQLException;
+
+ public abstract void closeSession(SessionHandle sessionHandle)
+ throws HiveSQLException;
+
+ public abstract GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType infoType)
+ throws HiveSQLException;
+
+ public abstract OperationHandle executeStatement(SessionHandle sessionHandle, String statement,
+ Map confOverlay)
+ throws HiveSQLException;
+
+ public abstract OperationHandle getTypeInfo(SessionHandle sessionHandle)
+ throws HiveSQLException;
+
+ public abstract OperationHandle getCatalogs(SessionHandle sessionHandle)
+ throws HiveSQLException;
+
+ public abstract OperationHandle getSchemas(SessionHandle sessionHandle,
+ String catalogName, String schemaName)
+ throws HiveSQLException;
+
+ public abstract OperationHandle getTables(SessionHandle sessionHandle,
+ String catalogName, String schemaName, String tableName, List tableTypes)
+ throws HiveSQLException;
+
+ public abstract OperationHandle getTableTypes(SessionHandle sessionHandle)
+ throws HiveSQLException;
+
+ public abstract OperationHandle getColumns(SessionHandle sessionHandle,
+ String catalogName, String schemaName, String tableName, String columnName)
+ throws HiveSQLException;
+
+ public abstract OperationHandle getFunctions(SessionHandle sessionHandle,
+ String catalogName, String schemaName, String functionName)
+ throws HiveSQLException;
+
+ public abstract OperationState getOperationStatus(OperationHandle opHandle)
+ throws HiveSQLException;
+
+ public abstract void cancelOperation(OperationHandle opHandle)
+ throws HiveSQLException;
+
+ public abstract void closeOperation(OperationHandle opHandle)
+ throws HiveSQLException;
+
+ public abstract TableSchema getResultSetMetadata(OperationHandle opHandle)
+ throws HiveSQLException;
+
+ public abstract RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation,
+ long maxRows)
+ throws HiveSQLException;
+
+ public abstract RowSet fetchResults(OperationHandle opHandle)
+ throws HiveSQLException;
+
+}
diff --git service/src/java/org/apache/hive/service/cli/OperationHandle.java service/src/java/org/apache/hive/service/cli/OperationHandle.java
new file mode 100644
index 0000000..8f548da
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/OperationHandle.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.cli;
+
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+
+
+public class OperationHandle extends Handle {
+
+ private OperationType opType = OperationType.EXECUTE_STATEMENT;
+ private boolean hasResultSet = false;
+
+ public OperationHandle() {
+ // TODO: make this type abstract
+ super();
+ }
+
+ public OperationHandle(OperationType opType) {
+ super();
+ this.opType = opType;
+ }
+
+ public OperationHandle(TOperationHandle tOperationHandle) {
+ super(tOperationHandle.getOperationId());
+ this.opType = OperationType.getOperationType(tOperationHandle.getOperationType());
+ this.hasResultSet = tOperationHandle.isHasResultSet();
+ }
+
+ public OperationType getOperationType() {
+ return opType;
+ }
+
+ public void setHasResultSet(boolean hasResultSet) {
+ this.hasResultSet = hasResultSet;
+ }
+
+ public boolean hasResultSet() {
+ return hasResultSet;
+ }
+
+ public TOperationHandle toTOperationHandle() {
+ TOperationHandle tOperationHandle = new TOperationHandle();
+ tOperationHandle.setOperationId(getHandleIdentifier().toTHandleIdentifier());
+ tOperationHandle.setOperationType(opType.toTOperationType());
+ tOperationHandle.setHasResultSet(hasResultSet);
+ return tOperationHandle;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = super.hashCode();
+ result = prime * result + ((opType == null) ? 0 : opType.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (!super.equals(obj)) {
+ return false;
+ }
+ if (!(obj instanceof OperationHandle)) {
+ return false;
+ }
+ OperationHandle other = (OperationHandle) obj;
+ if (opType != other.opType) {
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "OperationHandle [opType=" + opType + ", getHandleIdentifier()=" + getHandleIdentifier()
+ + "]";
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/OperationState.java service/src/java/org/apache/hive/service/cli/OperationState.java
new file mode 100644
index 0000000..ab6ae84
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/OperationState.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import org.apache.hive.service.cli.thrift.TOperationState;
+
+/**
+ * OperationState.
+ *
+ */
+public enum OperationState {
+ INITIALIZED(TOperationState.INITIALIZED_STATE),
+ RUNNING(TOperationState.RUNNING_STATE),
+ FINISHED(TOperationState.FINISHED_STATE),
+ CANCELED(TOperationState.CANCELED_STATE),
+ CLOSED(TOperationState.CLOSED_STATE),
+ ERROR(TOperationState.ERROR_STATE),
+ UNKNOWN(TOperationState.UKNOWN_STATE);
+
+ private final TOperationState tOperationState;
+
+ OperationState(TOperationState tOperationState) {
+ this.tOperationState = tOperationState;
+ }
+
+
+ public static OperationState getOperationState(TOperationState tOperationState) {
+ // TODO: replace this with a Map?
+ for (OperationState opState : values()) {
+ if (tOperationState.equals(opState.tOperationState)) {
+ return opState;
+ }
+ }
+ return OperationState.UNKNOWN;
+ }
+
+ public static void validateTransition(OperationState oldState, OperationState newState)
+ throws HiveSQLException {
+ switch (oldState) {
+ case INITIALIZED:
+ switch (newState) {
+ case RUNNING:
+ case CLOSED:
+ return;
+ }
+ break;
+ case RUNNING:
+ switch (newState) {
+ case FINISHED:
+ case CANCELED:
+ case ERROR:
+ case CLOSED:
+ return;
+ }
+ break;
+ case FINISHED:
+ case CANCELED:
+ case ERROR:
+ if (OperationState.CLOSED.equals(newState)) {
+ return;
+ }
+ default:
+ // fall-through
+ }
+ throw new HiveSQLException("Illegal Operation state transition");
+ }
+
+ public void validateTransition(OperationState newState)
+ throws HiveSQLException {
+ validateTransition(this, newState);
+ }
+
+ public TOperationState toTOperationState() {
+ return tOperationState;
+ }
+}
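
A sketch of the transition rules encoded above: the happy path runs INITIALIZED to RUNNING to FINISHED to CLOSED, and anything outside the allowed set throws. The class name is illustrative, not part of the patch.

    import org.apache.hive.service.cli.HiveSQLException;
    import org.apache.hive.service.cli.OperationState;

    public class StateTransitionSketch {
      public static void main(String[] args) throws HiveSQLException {
        OperationState.INITIALIZED.validateTransition(OperationState.RUNNING);
        OperationState.RUNNING.validateTransition(OperationState.FINISHED);
        OperationState.FINISHED.validateTransition(OperationState.CLOSED);
        try {
          // A finished operation cannot go back to RUNNING.
          OperationState.FINISHED.validateTransition(OperationState.RUNNING);
        } catch (HiveSQLException expected) {
          System.out.println(expected.getMessage());
        }
      }
    }
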
diff --git service/src/java/org/apache/hive/service/cli/OperationType.java service/src/java/org/apache/hive/service/cli/OperationType.java
new file mode 100644
index 0000000..429d9a4
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/OperationType.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import org.apache.hive.service.cli.thrift.TOperationType;
+
+/**
+ * OperationType.
+ *
+ */
+public enum OperationType {
+
+ UNKNOWN_OPERATION(TOperationType.UNKNOWN),
+ EXECUTE_STATEMENT(TOperationType.EXECUTE_STATEMENT),
+ GET_TYPE_INFO(TOperationType.GET_TYPE_INFO),
+ GET_CATALOGS(TOperationType.GET_CATALOGS),
+ GET_SCHEMAS(TOperationType.GET_SCHEMAS),
+ GET_TABLES(TOperationType.GET_TABLES),
+ GET_TABLE_TYPES(TOperationType.GET_TABLE_TYPES),
+ GET_COLUMNS(TOperationType.GET_COLUMNS),
+ GET_FUNCTIONS(TOperationType.GET_FUNCTIONS);
+
+ private TOperationType tOperationType;
+
+ OperationType(TOperationType tOpType) {
+ this.tOperationType = tOpType;
+ }
+
+ public static OperationType getOperationType(TOperationType tOperationType) {
+ // TODO: replace this with a Map?
+ for (OperationType opType : values()) {
+ if (tOperationType.equals(opType.tOperationType)) {
+ return opType;
+ }
+ }
+ return OperationType.UNKNOWN_OPERATION;
+ }
+
+ public TOperationType toTOperationType() {
+ return tOperationType;
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/PatternOrIdentifier.java service/src/java/org/apache/hive/service/cli/PatternOrIdentifier.java
new file mode 100644
index 0000000..6e4d43f
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/PatternOrIdentifier.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+/**
+ * PatternOrIdentifier.
+ *
+ */
+public class PatternOrIdentifier {
+
+ boolean isPattern = false;
+ String text;
+
+ public PatternOrIdentifier(String tpoi) {
+ text = tpoi;
+ isPattern = false;
+ }
+
+ public boolean isPattern() {
+ return isPattern;
+ }
+
+ public boolean isIdentifier() {
+ return !isPattern;
+ }
+
+ @Override
+ public String toString() {
+ return text;
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/Row.java service/src/java/org/apache/hive/service/cli/Row.java
new file mode 100644
index 0000000..e354849
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/Row.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hive.service.cli.thrift.TColumnValue;
+import org.apache.hive.service.cli.thrift.TRow;
+
+/**
+ * Row.
+ *
+ */
+public class Row {
+ private final List values = new ArrayList();
+
+ public Row() {
+ }
+
+ public Row(TRow tRow) {
+ for (TColumnValue tColumnValues : tRow.getColVals()) {
+ values.add(new ColumnValue(tColumnValues));
+ }
+ }
+
+ public Row(TableSchema schema, Object[] fields) {
+ assert fields.length == schema.getColumnDescriptors().size();
+ for (ColumnDescriptor colDesc : schema.getColumnDescriptors()) {
+ TypeDescriptor typeDesc = colDesc.getTypeDescriptor();
+ values.add(ColumnValue.newColumnValue(typeDesc.getType(), fields[colDesc.getOrdinalPosition() - 1]));
+ }
+ }
+
+ public Row addColumnValue(ColumnValue value) {
+ values.add(value);
+ return this;
+ }
+
+ public Row addBoolean(boolean value) {
+ values.add(ColumnValue.booleanValue(value));
+ return this;
+ }
+
+ public Row addByte(byte value) {
+ values.add(ColumnValue.byteValue(value));
+ return this;
+ }
+
+ public Row addString(String value) {
+ values.add(ColumnValue.stringValue(value));
+ return this;
+ }
+
+ public TRow toTRow() {
+ TRow tRow = new TRow();
+ for (ColumnValue columnValue : values) {
+ tRow.addToColVals(columnValue.toTColumnValue());
+ }
+ return tRow;
+ }
+}
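
The add* methods return this, so a row can be assembled fluently before conversion to its wire form. A brief sketch (class name illustrative, not part of the patch):

    import org.apache.hive.service.cli.Row;
    import org.apache.hive.service.cli.thrift.TRow;

    public class RowSketch {
      public static void main(String[] args) {
        TRow wire = new Row()
            .addString("default")
            .addBoolean(true)
            .toTRow();
        System.out.println(wire.getColVals().size());  // 2
      }
    }
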
diff --git service/src/java/org/apache/hive/service/cli/RowSet.java service/src/java/org/apache/hive/service/cli/RowSet.java
new file mode 100644
index 0000000..dce506d
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/RowSet.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hive.service.cli.thrift.TRow;
+import org.apache.hive.service.cli.thrift.TRowSet;
+
+/**
+ * RowSet.
+ *
+ */
+public class RowSet {
+
+ private long startOffset = 0;
+ private boolean hasMoreResults = false;
+ private List<Row> rows;
+
+ public RowSet() {
+ rows = new ArrayList<Row>();
+ }
+
+ public RowSet(TRowSet tRowSet) {
+ this();
+ startOffset = tRowSet.getStartRowOffset();
+ for (TRow tRow : tRowSet.getRows()) {
+ rows.add(new Row(tRow));
+ }
+ }
+
+ public RowSet(List<Row> rows, long startOffset) {
+ this();
+ this.rows.addAll(rows);
+ this.startOffset = startOffset;
+ }
+
+ public RowSet addRow(Row row) {
+ rows.add(row);
+ return this;
+ }
+
+ public RowSet addRow(TableSchema schema, Object[] fields) {
+ return addRow(new Row(schema, fields));
+ }
+
+ public RowSet extractSubset(int maxRows) {
+ int numRows = rows.size();
+ maxRows = Math.min(maxRows, numRows);
+ RowSet result = new RowSet(rows.subList(0, maxRows), startOffset);
+ rows = new ArrayList<Row>(rows.subList(maxRows, numRows));
+ startOffset += result.getSize();
+ return result;
+ }
+
+ public long getStartOffset() {
+ return startOffset;
+ }
+
+ public RowSet setStartOffset(long startOffset) {
+ this.startOffset = startOffset;
+ return this;
+ }
+
+ public boolean getHasMoreResults() {
+ return hasMoreResults;
+ }
+
+ public RowSet setHasMoreResults(boolean hasMoreResults) {
+ this.hasMoreResults = hasMoreResults;
+ return this;
+ }
+
+ public int getSize() {
+ return rows.size();
+ }
+
+ public TRowSet toTRowSet() {
+ TRowSet tRowSet = new TRowSet();
+ tRowSet.setStartRowOffset(startOffset);
+ List<TRow> tRows = new ArrayList<TRow>();
+ for (Row row : rows) {
+ tRows.add(row.toTRow());
+ }
+ tRowSet.setRows(tRows);
+
+ /*
+ //List booleanColumn = new ArrayList();
+ //List byteColumn = new ArrayList();
+ //List shortColumn = new ArrayList();
+ List integerColumn = new ArrayList();
+
+ integerColumn.add(1);
+ //integerColumn.add(null);
+ integerColumn.add(3);
+ //integerColumn.add(null);
+
+
+ TColumnUnion column = TColumnUnion.i32Column(integerColumn);
+ List columns = new ArrayList();
+ columns.add(column);
+ tRowSet.setColumns(columns);
+ */
+
+ return tRowSet;
+ }
+}
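
extractSubset implements incremental fetch: it peels off up to maxRows rows, returns them tagged with the current offset, and advances the remainder. A sketch of those semantics (class name illustrative, not part of the patch):

import org.apache.hive.service.cli.Row;
import org.apache.hive.service.cli.RowSet;

public class FetchSketch {
  public static void main(String[] args) {
    RowSet buffered = new RowSet();
    for (int i = 0; i < 5; i++) {
      buffered.addRow(new Row().addString("row-" + i));
    }
    RowSet first = buffered.extractSubset(2);
    // first.getStartOffset() == 0, first.getSize() == 2
    // buffered.getStartOffset() == 2, buffered.getSize() == 3
    RowSet second = buffered.extractSubset(2);
    // second.getStartOffset() == 2; one row remains in buffered
    System.out.println(first.getSize() + " " + second.getStartOffset());
  }
}
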
diff --git service/src/java/org/apache/hive/service/cli/SessionHandle.java service/src/java/org/apache/hive/service/cli/SessionHandle.java
new file mode 100644
index 0000000..1ed02e2
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/SessionHandle.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.util.UUID;
+
+import org.apache.hive.service.cli.thrift.TSessionHandle;
+
+
+/**
+ * SessionHandle.
+ *
+ */
+public class SessionHandle extends Handle {
+
+ public SessionHandle() {
+ super();
+ }
+
+ public SessionHandle(HandleIdentifier handleId) {
+ super(handleId);
+ }
+
+ public SessionHandle(TSessionHandle tSessionHandle) {
+ super(tSessionHandle.getSessionId());
+ }
+
+ public UUID getSessionId() {
+ return getHandleIdentifier().getPublicId();
+ }
+
+ public TSessionHandle toTSessionHandle() {
+ TSessionHandle tSessionHandle = new TSessionHandle();
+ tSessionHandle.setSessionId(getHandleIdentifier().toTHandleIdentifier());
+ return tSessionHandle;
+ }
+
+ @Override
+ public String toString() {
+ return "SessionHandle [" + getHandleIdentifier() + "]";
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/TableSchema.java service/src/java/org/apache/hive/service/cli/TableSchema.java
new file mode 100644
index 0000000..155f529
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/TableSchema.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hive.service.cli.thrift.TColumnDesc;
+import org.apache.hive.service.cli.thrift.TTableSchema;
+
+/**
+ * TableSchema.
+ *
+ */
+public class TableSchema {
+ private final List<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>();
+
+ public TableSchema() {
+ }
+
+ public TableSchema(int numColumns) {
+ // TODO: remove this constructor
+ }
+
+ public TableSchema(TTableSchema tTableSchema) {
+ for (TColumnDesc tColumnDesc : tTableSchema.getColumns()) {
+ columns.add(new ColumnDescriptor(tColumnDesc));
+ }
+ }
+
+ public TableSchema(List<FieldSchema> fieldSchemas) {
+ int pos = 1;
+ for (FieldSchema field : fieldSchemas) {
+ columns.add(new ColumnDescriptor(field, pos++));
+ }
+ }
+
+ public TableSchema(Schema schema) {
+ this(schema.getFieldSchemas());
+ }
+
+ public List<ColumnDescriptor> getColumnDescriptors() {
+ return new ArrayList<ColumnDescriptor>(columns);
+ }
+
+ public ColumnDescriptor getColumnDescriptorAt(int pos) {
+ return columns.get(pos);
+ }
+
+ public int getSize() {
+ return columns.size();
+ }
+
+ public void clear() {
+ columns.clear();
+ }
+
+ public TTableSchema toTTableSchema() {
+ TTableSchema tTableSchema = new TTableSchema();
+ for (ColumnDescriptor col : columns) {
+ tTableSchema.addToColumns(col.toTColumnDesc());
+ }
+ return tTableSchema;
+ }
+
+ public TableSchema addPrimitiveColumn(String columnName, Type columnType, String columnComment) {
+ columns.add(ColumnDescriptor.newPrimitiveColumnDescriptor(columnName, columnComment, columnType, columns.size() + 1));
+ return this;
+ }
+
+ public TableSchema addStringColumn(String columnName, String columnComment) {
+ columns.add(ColumnDescriptor.newPrimitiveColumnDescriptor(columnName, columnComment, Type.STRING_TYPE, columns.size() + 1));
+ return this;
+ }
+}
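
TableSchema assigns 1-based ordinal positions as columns are added (columns.size() + 1), which Row's schema-driven constructor relies on when indexing into its fields array. A sketch under that reading (column names illustrative):

import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.Type;

public class SchemaSketch {
  public static void main(String[] args) {
    // Fluent construction appends columns in order.
    TableSchema fluent = new TableSchema()
        .addStringColumn("name", "User name")
        .addPrimitiveColumn("age", Type.INT_TYPE, "Age in years");

    // Construction from metastore FieldSchemas numbers columns the same way, starting at 1.
    TableSchema fromFields = new TableSchema(Arrays.asList(
        new FieldSchema("id", "int", "row id"),
        new FieldSchema("tags", "array<string>", null)));

    System.out.println(fluent.getSize() + " " + fromFields.getSize()); // 2 2
  }
}
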
diff --git service/src/java/org/apache/hive/service/cli/Type.java service/src/java/org/apache/hive/service/cli/Type.java
new file mode 100644
index 0000000..5d7295d
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/Type.java
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.sql.DatabaseMetaData;
+
+import org.apache.hive.service.cli.thrift.TTypeId;
+
+/**
+ * Type.
+ *
+ */
+public enum Type {
+ BOOLEAN_TYPE("BOOLEAN",
+ java.sql.Types.BOOLEAN,
+ TTypeId.BOOLEAN_TYPE),
+ TINYINT_TYPE("TINYINT",
+ java.sql.Types.TINYINT,
+ TTypeId.TINYINT_TYPE),
+ SMALLINT_TYPE("SMALLINT",
+ java.sql.Types.SMALLINT,
+ TTypeId.SMALLINT_TYPE),
+ INT_TYPE("INT",
+ java.sql.Types.INTEGER,
+ TTypeId.INT_TYPE),
+ BIGINT_TYPE("BIGINT",
+ java.sql.Types.BIGINT,
+ TTypeId.BIGINT_TYPE),
+ FLOAT_TYPE("FLOAT",
+ java.sql.Types.FLOAT,
+ TTypeId.FLOAT_TYPE),
+ DOUBLE_TYPE("DOUBLE",
+ java.sql.Types.DOUBLE,
+ TTypeId.DOUBLE_TYPE),
+ STRING_TYPE("STRING",
+ java.sql.Types.VARCHAR,
+ TTypeId.STRING_TYPE),
+ TIMESTAMP_TYPE("TIMESTAMP",
+ java.sql.Types.TIMESTAMP,
+ TTypeId.TIMESTAMP_TYPE),
+ BINARY_TYPE("BINARY",
+ java.sql.Types.BINARY,
+ TTypeId.BINARY_TYPE),
+ DECIMAL_TYPE("DECIMAL",
+ java.sql.Types.DECIMAL,
+ TTypeId.DECIMAL_TYPE,
+ false, false),
+ ARRAY_TYPE("ARRAY",
+ java.sql.Types.VARCHAR,
+ TTypeId.STRING_TYPE,
+ true, true),
+ MAP_TYPE("MAP",
+ java.sql.Types.VARCHAR,
+ TTypeId.STRING_TYPE,
+ true, true),
+ STRUCT_TYPE("STRUCT",
+ java.sql.Types.VARCHAR,
+ TTypeId.STRING_TYPE,
+ true, false),
+ UNION_TYPE("UNIONTYPE",
+ java.sql.Types.VARCHAR,
+ TTypeId.STRING_TYPE,
+ true, false),
+ USER_DEFINED_TYPE(null,
+ java.sql.Types.VARCHAR,
+ TTypeId.STRING_TYPE,
+ true, false);
+
+ private final String name;
+ private final TTypeId tType;
+ private final int javaSQLType;
+ private final boolean isComplex;
+ private final boolean isCollection;
+
+
+ Type(String name, int javaSQLType, TTypeId tType, boolean isComplex, boolean isCollection) {
+ this.name = name;
+ this.javaSQLType = javaSQLType;
+ this.tType = tType;
+ this.isComplex = isComplex;
+ this.isCollection = isCollection;
+ }
+
+ Type(String name, int javaSqlType, TTypeId tType) {
+ this(name, javaSqlType, tType, false, false);
+ }
+
+ public boolean isPrimitiveType() {
+ return !isComplex;
+ }
+
+ public boolean isComplexType() {
+ return isComplex;
+ }
+
+ public boolean isCollectionType() {
+ return isCollection;
+ }
+
+ public static Type getType(TTypeId tType) {
+ for (Type type : values()) {
+ if (tType.equals(type.tType)) {
+ return type;
+ }
+ }
+ throw new IllegalArgumentException("Unregonized Thrift TTypeId value: " + tType);
+ }
+
+ public static Type getType(String name) {
+ for (Type type : values()) {
+ if (name.equalsIgnoreCase(type.name)) {
+ return type;
+ } else if (type.isComplexType() && type.name != null) {
+ // Complex type names carry their parameters (e.g. "map<string,int>"), so match on the prefix.
+ // The null check guards USER_DEFINED_TYPE, whose name is null and would otherwise NPE here.
+ if (name.toUpperCase().startsWith(type.name)) {
+ return type;
+ }
+ }
+ }
+ throw new IllegalArgumentException("Unrecognized type name: " + name);
+ }
+
+ /**
+ * Radix for this type (typically either 2 or 10)
+ * Null is returned for data types where this is not applicable.
+ */
+ public Integer getNumPrecRadix() {
+ switch (this) {
+ case TINYINT_TYPE:
+ case SMALLINT_TYPE:
+ case INT_TYPE:
+ case BIGINT_TYPE:
+ return 10;
+ case FLOAT_TYPE:
+ case DOUBLE_TYPE:
+ return 2;
+ default:
+ // everything else (including boolean and string) is null
+ return null;
+ }
+ }
+
+ /**
+ * The number of fractional digits for this type.
+ * Null is returned for data types where this is not applicable.
+ */
+ public Integer getDecimalDigits() {
+ switch (this) {
+ case BOOLEAN_TYPE:
+ case TINYINT_TYPE:
+ case SMALLINT_TYPE:
+ case INT_TYPE:
+ case BIGINT_TYPE:
+ return 0;
+ case FLOAT_TYPE:
+ return 7;
+ case DOUBLE_TYPE:
+ return 15;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Maximum precision for numeric types.
+ * Returns null for non-numeric types.
+ * @return
+ */
+ public Integer getPrecision() {
+ switch (this) {
+ case TINYINT_TYPE:
+ return 3;
+ case SMALLINT_TYPE:
+ return 5;
+ case INT_TYPE:
+ return 10;
+ case BIGINT_TYPE:
+ return 19;
+ case FLOAT_TYPE:
+ return 7;
+ case DOUBLE_TYPE:
+ return 15;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Scale for this type.
+ */
+ public Integer getScale() {
+ switch (this) {
+ case BOOLEAN_TYPE:
+ case STRING_TYPE:
+ case TIMESTAMP_TYPE:
+ case TINYINT_TYPE:
+ case SMALLINT_TYPE:
+ case INT_TYPE:
+ case BIGINT_TYPE:
+ return 0;
+ case FLOAT_TYPE:
+ return 7;
+ case DOUBLE_TYPE:
+ return 15;
+ case DECIMAL_TYPE:
+ return Integer.MAX_VALUE;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * The column size for this type.
+ * For numeric data this is the maximum precision.
+ * For character data this is the length in characters.
+ * For datetime types this is the length in characters of the String representation
+ * (assuming the maximum allowed precision of the fractional seconds component).
+ * For binary data this is the length in bytes.
+ * Null is returned for data types where the column size is not applicable.
+ */
+ public Integer getColumnSize() {
+ if (isNumericType()) {
+ return getPrecision();
+ }
+ switch (this) {
+ case STRING_TYPE:
+ case BINARY_TYPE:
+ return Integer.MAX_VALUE;
+ case TIMESTAMP_TYPE:
+ return 30;
+ default:
+ return null;
+ }
+ }
+
+ public boolean isNumericType() {
+ switch (this) {
+ case TINYINT_TYPE:
+ case SMALLINT_TYPE:
+ case INT_TYPE:
+ case BIGINT_TYPE:
+ case FLOAT_TYPE:
+ case DOUBLE_TYPE:
+ case DECIMAL_TYPE:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * Prefix used to quote a literal of this type (may be null)
+ */
+ public String getLiteralPrefix() {
+ return null;
+ }
+
+ /**
+ * Suffix used to quote a literal of this type (may be null)
+ * @return
+ */
+ public String getLiteralSuffix() {
+ return null;
+ }
+
+ /**
+ * Can you use NULL for this type?
+ * @return
+ * DatabaseMetaData.typeNoNulls - does not allow NULL values
+ * DatabaseMetaData.typeNullable - allows NULL values
+ * DatabaseMetaData.typeNullableUnknown - nullability unknown
+ */
+ public Short getNullable() {
+ // All Hive types are nullable
+ return DatabaseMetaData.typeNullable;
+ }
+
+ /**
+ * Is the type case sensitive?
+ * @return
+ */
+ public Boolean isCaseSensitive() {
+ switch (this) {
+ case STRING_TYPE:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * Parameters used in creating the type (may be null)
+ * @return
+ */
+ public String getCreateParams() {
+ return null;
+ }
+
+ /**
+ * Can you use WHERE based on this type?
+ * @return
+ * DatabaseMetaData.typePredNone - No support
+ * DatabaseMetaData.typePredChar - Only support with WHERE .. LIKE
+ * DatabaseMetaData.typePredBasic - Supported except for WHERE .. LIKE
+ * DatabaseMetaData.typeSearchable - Supported for all WHERE ..
+ */
+ public Short getSearchable() {
+ if (isPrimitiveType()) {
+ return DatabaseMetaData.typeSearchable;
+ }
+ return DatabaseMetaData.typePredNone;
+ }
+
+ /**
+ * Is this type unsigned?
+ * @return
+ */
+ public Boolean isUnsignedAttribute() {
+ if (isNumericType()) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Can this type represent money?
+ * @return
+ */
+ public Boolean isFixedPrecScale() {
+ return false;
+ }
+
+ /**
+ * Can this type be used for an auto-increment value?
+ * @return
+ */
+ public Boolean isAutoIncrement() {
+ return false;
+ }
+
+ /**
+ * Localized version of type name (may be null).
+ * @return
+ */
+ public String getLocalizedName() {
+ return null;
+ }
+
+ /**
+ * Minimum scale supported for this type
+ * @return
+ */
+ public Short getMinimumScale() {
+ return 0;
+ }
+
+ /**
+ * Maximum scale supported for this type
+ * @return
+ */
+ public Short getMaximumScale() {
+ return 0;
+ }
+
+ public TTypeId toTType() {
+ return tType;
+ }
+
+ public int toJavaSQLType() {
+ return javaSQLType;
+ }
+
+ public String getName() {
+ return name;
+ }
+}
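
Type.getType(String) resolves primitive names case-insensitively and complex names by prefix, since complex type names carry their parameters. A sketch of the lookup behavior:

import org.apache.hive.service.cli.Type;

public class TypeLookupSketch {
  public static void main(String[] args) {
    Type intType = Type.getType("int");             // INT_TYPE, case-insensitive match
    Type mapType = Type.getType("map<string,int>"); // MAP_TYPE, matched on the "MAP" prefix

    // Complex types are surfaced to JDBC clients as strings.
    System.out.println(mapType.toJavaSQLType() == java.sql.Types.VARCHAR); // true
    System.out.println(intType.getPrecision()); // 10
  }
}
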
diff --git service/src/java/org/apache/hive/service/cli/TypeDescriptor.java service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
new file mode 100644
index 0000000..b4817ad
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import java.util.List;
+
+import org.apache.hive.service.cli.thrift.TPrimitiveTypeEntry;
+import org.apache.hive.service.cli.thrift.TTypeDesc;
+import org.apache.hive.service.cli.thrift.TTypeEntry;
+
+/**
+ * TypeDescriptor.
+ *
+ */
+public class TypeDescriptor {
+
+ private final Type type;
+ private String typeName = null;
+
+ public TypeDescriptor(Type type) {
+ this.type = type;
+ }
+
+ public TypeDescriptor(TTypeDesc tTypeDesc) {
+ List<TTypeEntry> tTypeEntries = tTypeDesc.getTypes();
+ TPrimitiveTypeEntry top = tTypeEntries.get(0).getPrimitiveEntry();
+ this.type = Type.getType(top.getType());
+ }
+
+ public TypeDescriptor(String typeName) {
+ this.type = Type.getType(typeName);
+ if (this.type.isComplexType()) {
+ this.typeName = typeName;
+ }
+ }
+
+ public Type getType() {
+ return type;
+ }
+
+ public TTypeDesc toTTypeDesc() {
+ TTypeEntry entry = TTypeEntry.primitiveEntry(new TPrimitiveTypeEntry(type.toTType()));
+ TTypeDesc desc = new TTypeDesc();
+ desc.addToTypes(entry);
+ return desc;
+ }
+
+ public String getTypeName() {
+ if (typeName != null) {
+ return typeName;
+ } else {
+ return type.getName();
+ }
+ }
+}
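
For complex types, TypeDescriptor retains the full parameterized name alongside the resolved Type, so getTypeName() reports the original string rather than the bare category. A small sketch:

import org.apache.hive.service.cli.TypeDescriptor;

public class TypeDescriptorSketch {
  public static void main(String[] args) {
    TypeDescriptor primitive = new TypeDescriptor("string");
    System.out.println(primitive.getTypeName()); // "STRING", the canonical Type name

    TypeDescriptor complex = new TypeDescriptor("array<int>");
    System.out.println(complex.getTypeName()); // "array<int>", original name preserved
  }
}
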
diff --git service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java
new file mode 100644
index 0000000..fe0c6db
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.processors.AddResourceProcessor;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * AddResourceOperation.
+ *
+ */
+public class AddResourceOperation extends HiveCommandOperation {
+
+ protected AddResourceOperation(HiveSession parentSession, String statement,
+ Map<String, String> confOverlay) {
+ super(parentSession, statement, confOverlay);
+ setCommandProcessor(new AddResourceProcessor());
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java
new file mode 100644
index 0000000..496bba9
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.processors.DeleteResourceProcessor;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * DeleteResourceOperation.
+ *
+ */
+public class DeleteResourceOperation extends HiveCommandOperation {
+
+ protected DeleteResourceOperation(HiveSession parentSession, String statement,
+ Map<String, String> confOverlay) {
+ super(parentSession, statement, confOverlay);
+ setCommandProcessor(new DeleteResourceProcessor());
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java
new file mode 100644
index 0000000..a8b8ed4
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.processors.DfsProcessor;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * DfsOperation.
+ *
+ */
+public class DfsOperation extends HiveCommandOperation {
+
+ protected DfsOperation(HiveSession parentSession, String statement,
+ Map<String, String> confOverlay) {
+ super(parentSession, statement, confOverlay);
+ setCommandProcessor(new DfsProcessor(parentSession.getHiveConf()));
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java
new file mode 100644
index 0000000..9a1da59
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.cli.operation;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.session.HiveSession;
+
+public abstract class ExecuteStatementOperation extends Operation {
+ protected String statement = null;
+ protected Map<String, String> confOverlay = new HashMap<String, String>();
+
+ public ExecuteStatementOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay) {
+ super(parentSession, OperationType.EXECUTE_STATEMENT);
+ this.statement = statement;
+ this.confOverlay = confOverlay;
+ }
+
+ public String getStatement() {
+ return statement;
+ }
+
+ public static ExecuteStatementOperation newExecuteStatementOperation(
+ HiveSession parentSession, String statement, Map<String, String> confOverlay) {
+ String[] tokens = statement.trim().split("\\s+");
+ String command = tokens[0].toLowerCase();
+
+ if ("set".equals(command)) {
+ return new SetOperation(parentSession, statement, confOverlay);
+ } else if ("dfs".equals(command)) {
+ return new DfsOperation(parentSession, statement, confOverlay);
+ } else if ("add".equals(command)) {
+ return new AddResourceOperation(parentSession, statement, confOverlay);
+ } else if ("delete".equals(command)) {
+ return new DeleteResourceOperation(parentSession, statement, confOverlay);
+ } else {
+ return new SQLOperation(parentSession, statement, confOverlay);
+ }
+ }
+}
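
The factory routes on the first whitespace-delimited token: set, dfs, add, and delete map to command operations, and anything else falls through to SQLOperation. A standalone sketch of just that dispatch rule (route is illustrative, not part of the patch):

public class DispatchSketch {
  // Mirrors newExecuteStatementOperation's routing by first token.
  static String route(String statement) {
    String command = statement.trim().split("\\s+")[0].toLowerCase();
    if ("set".equals(command)) {
      return "SetOperation";
    } else if ("dfs".equals(command)) {
      return "DfsOperation";
    } else if ("add".equals(command)) {
      return "AddResourceOperation";
    } else if ("delete".equals(command)) {
      return "DeleteResourceOperation";
    }
    return "SQLOperation";
  }

  public static void main(String[] args) {
    System.out.println(route("set hive.exec.parallel=true")); // SetOperation
    System.out.println(route("SELECT 1"));                    // SQLOperation
  }
}
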
diff --git service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
new file mode 100644
index 0000000..581e69c
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetCatalogsOperation.
+ *
+ */
+public class GetCatalogsOperation extends MetadataOperation {
+ private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+ .addStringColumn("TABLE_CAT", "Catalog name. NULL if not applicable.");
+
+ private final RowSet rowSet = new RowSet();
+
+ protected GetCatalogsOperation(HiveSession parentSession) {
+ super(parentSession, OperationType.GET_CATALOGS);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#run()
+ */
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
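+ // No rows are ever added here: Hive exposes no catalogs, so the result set stays empty.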
+ setState(OperationState.FINISHED);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+ */
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ return RESULT_SET_SCHEMA;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return rowSet.extractSubset((int)maxRows);
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
new file mode 100644
index 0000000..af87a90
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
@@ -0,0 +1,198 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.sql.DatabaseMetaData;
+import java.util.Collections;
+import java.util.List;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hive.service.cli.ColumnDescriptor;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.Type;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetColumnsOperation.
+ *
+ */
+public class GetColumnsOperation extends MetadataOperation {
+
+ private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+ .addPrimitiveColumn("TABLE_CAT", Type.STRING_TYPE,
+ "Catalog name. NULL if not applicable")
+ .addPrimitiveColumn("TABLE_SCHEM", Type.STRING_TYPE,
+ "Schema name")
+ .addPrimitiveColumn("TABLE_NAME", Type.STRING_TYPE,
+ "Table name")
+ .addPrimitiveColumn("COLUMN_NAME", Type.STRING_TYPE,
+ "Column name")
+ .addPrimitiveColumn("DATA_TYPE", Type.INT_TYPE,
+ "SQL type from java.sql.Types")
+ .addPrimitiveColumn("TYPE_NAME", Type.STRING_TYPE,
+ "Data source dependent type name, for a UDT the type name is fully qualified")
+ .addPrimitiveColumn("COLUMN_SIZE", Type.INT_TYPE,
+ "Column size. For char or date types this is the maximum number of characters,"
+ + " for numeric or decimal types this is precision.")
+ .addPrimitiveColumn("BUFFER_LENGTH", Type.TINYINT_TYPE,
+ "Unused")
+ .addPrimitiveColumn("DECIMAL_DIGITS", Type.INT_TYPE,
+ "The number of fractional digits")
+ .addPrimitiveColumn("NUM_PREC_RADIX", Type.INT_TYPE,
+ "Radix (typically either 10 or 2)")
+ .addPrimitiveColumn("NULLABLE", Type.INT_TYPE,
+ "Is NULL allowed")
+ .addPrimitiveColumn("REMARKS", Type.STRING_TYPE,
+ "Comment describing column (may be null)")
+ .addPrimitiveColumn("COLUMN_DEF", Type.STRING_TYPE,
+ "Default value (may be null)")
+ .addPrimitiveColumn("SQL_DATA_TYPE", Type.INT_TYPE,
+ "Unused")
+ .addPrimitiveColumn("SQL_DATETIME_SUB", Type.INT_TYPE,
+ "Unused")
+ .addPrimitiveColumn("CHAR_OCTET_LENGTH", Type.INT_TYPE,
+ "For char types the maximum number of bytes in the column")
+ .addPrimitiveColumn("ORDINAL_POSITION", Type.INT_TYPE,
+ "Index of column in table (starting at 1)")
+ .addPrimitiveColumn("IS_NULLABLE", Type.STRING_TYPE,
+ "\"NO\" means column definitely does not allow NULL values; "
+ + "\"YES\" means the column might allow NULL values. An empty "
+ + "string means nobody knows.")
+ .addPrimitiveColumn("SCOPE_CATALOG", Type.STRING_TYPE,
+ "Catalog of table that is the scope of a reference attribute "
+ + "(null if DATA_TYPE isn't REF)")
+ .addPrimitiveColumn("SCOPE_SCHEMA", Type.STRING_TYPE,
+ "Schema of table that is the scope of a reference attribute "
+ + "(null if the DATA_TYPE isn't REF)")
+ .addPrimitiveColumn("SCOPE_TABLE", Type.STRING_TYPE,
+ "Table name that this the scope of a reference attribure "
+ + "(null if the DATA_TYPE isn't REF)")
+ .addPrimitiveColumn("SOURCE_DATA_TYPE", Type.SMALLINT_TYPE,
+ "Source type of a distinct type or user-generated Ref type, "
+ + "SQL type from java.sql.Types (null if DATA_TYPE isn't DISTINCT or user-generated REF)")
+ .addPrimitiveColumn("IS_AUTO_INCREMENT", Type.STRING_TYPE,
+ "Indicates whether this column is auto incremented.");
+
+ private final String catalogName;
+ private final String schemaName;
+ private final String tableName;
+ private final String columnName;
+
+ private final RowSet rowSet = new RowSet();
+
+ protected GetColumnsOperation(HiveSession parentSession, String catalogName, String schemaName,
+ String tableName, String columnName) {
+ super(parentSession, OperationType.GET_COLUMNS);
+ this.catalogName = catalogName;
+ this.schemaName = schemaName;
+ this.tableName = tableName;
+ this.columnName = columnName;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#run()
+ */
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
+ try {
+ IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
+ String schemaPattern = convertSchemaPattern(schemaName);
+ String tablePattern = convertIdentifierPattern(tableName, true);
+
+ Pattern columnPattern = null;
+ if (columnName != null) {
+ columnPattern = Pattern.compile(convertIdentifierPattern(columnName, false));
+ }
+
+ List<String> dbNames = metastoreClient.getDatabases(schemaPattern);
+ Collections.sort(dbNames);
+ for (String dbName : dbNames) {
+ List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
+ Collections.sort(tableNames);
+ for (Table table : metastoreClient.getTableObjectsByName(dbName, tableNames)) {
+ TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
+ for (ColumnDescriptor column : schema.getColumnDescriptors()) {
+ if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
+ continue;
+ }
+ Object[] rowData = new Object[] {
+ null, // TABLE_CAT
+ table.getDbName(), // TABLE_SCHEM
+ table.getTableName(), // TABLE_NAME
+ column.getName(), // COLUMN_NAME
+ column.getType().toJavaSQLType(), // DATA_TYPE
+ column.getTypeName(), // TYPE_NAME
+ column.getType().getColumnSize(), // COLUMN_SIZE
+ null, // BUFFER_LENGTH, unused
+ column.getType().getDecimalDigits(), // DECIMAL_DIGITS
+ column.getType().getNumPrecRadix(), // NUM_PREC_RADIX
+ DatabaseMetaData.columnNullable, // NULLABLE
+ column.getComment(), // REMARKS
+ null, // COLUMN_DEF
+ null, // SQL_DATA_TYPE
+ null, // SQL_DATETIME_SUB
+ null, // CHAR_OCTET_LENGTH
+ column.getOrdinalPosition(), // ORDINAL_POSITION
+ "YES", // IS_NULLABLE
+ null, // SCOPE_CATALOG
+ null, // SCOPE_SCHEMA
+ null, // SCOPE_TABLE
+ null, // SOURCE_DATA_TYPE
+ "NO", // IS_AUTO_INCREMENT
+ };
+ rowSet.addRow(RESULT_SET_SCHEMA, rowData);
+ }
+ }
+ }
+ setState(OperationState.FINISHED);
+ } catch (Exception e) {
+ setState(OperationState.ERROR);
+ throw new HiveSQLException(e);
+ }
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+ */
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return RESULT_SET_SCHEMA;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return rowSet.extractSubset((int)maxRows);
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
new file mode 100644
index 0000000..0fe01c0
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.sql.DatabaseMetaData;
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hive.service.cli.CLIServiceUtils;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.Type;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetFunctionsOperation.
+ *
+ */
+public class GetFunctionsOperation extends MetadataOperation {
+ private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+ .addPrimitiveColumn("FUNCTION_CAT", Type.STRING_TYPE,
+ "Function catalog (may be null)")
+ .addPrimitiveColumn("FUNCTION_SCHEM", Type.STRING_TYPE,
+ "Function schema (may be null)")
+ .addPrimitiveColumn("FUNCTION_NAME", Type.STRING_TYPE,
+ "Function name. This is the name used to invoke the function")
+ .addPrimitiveColumn("REMARKS", Type.STRING_TYPE,
+ "Explanatory comment on the function")
+ .addPrimitiveColumn("FUNCTION_TYPE", Type.INT_TYPE,
+ "Kind of function.")
+ .addPrimitiveColumn("SPECIFIC_NAME", Type.STRING_TYPE,
+ "The name which uniquely identifies this function within its schema");
+
+ private final String catalogName;
+ private final String schemaName;
+ private final String functionName;
+
+ private final RowSet rowSet = new RowSet();
+
+ public GetFunctionsOperation(HiveSession parentSession,
+ String catalogName, String schemaName, String functionName) {
+ super(parentSession, OperationType.GET_FUNCTIONS);
+ this.catalogName = catalogName;
+ this.schemaName = schemaName;
+ this.functionName = functionName;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#run()
+ */
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
+ try {
+ if ((null == catalogName || "".equals(catalogName))
+ && (null == schemaName || "".equals(schemaName))) {
+ Set<String> functionNames = FunctionRegistry
+ .getFunctionNames(CLIServiceUtils.patternToRegex(functionName));
+ for (String matchedFunctionName : functionNames) {
+ FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(matchedFunctionName);
+ Object[] rowData = new Object[] {
+ null, // FUNCTION_CAT
+ null, // FUNCTION_SCHEM
+ functionInfo.getDisplayName(), // FUNCTION_NAME
+ "", // REMARKS
+ (functionInfo.isGenericUDTF() ?
+ DatabaseMetaData.functionReturnsTable
+ : DatabaseMetaData.functionNoTable), // FUNCTION_TYPE
+ functionInfo.getClass().getCanonicalName()
+ };
+ rowSet.addRow(RESULT_SET_SCHEMA, rowData);
+ }
+ }
+ setState(OperationState.FINISHED);
+ } catch (Exception e) {
+ setState(OperationState.ERROR);
+ throw new HiveSQLException(e);
+ }
+ }
+
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+ */
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return RESULT_SET_SCHEMA;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return rowSet.extractSubset((int)maxRows);
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
new file mode 100644
index 0000000..6970f35
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetSchemasOperation.
+ *
+ */
+public class GetSchemasOperation extends MetadataOperation {
+ private final String catalogName;
+ private final String schemaName;
+
+ private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+ .addStringColumn("TABLE_SCHEMA", "Schema name.")
+ .addStringColumn("TABLE_CATALOG", "Catalog name.");
+
+ private RowSet rowSet;
+
+ protected GetSchemasOperation(HiveSession parentSession,
+ String catalogName, String schemaName) {
+ super(parentSession, OperationType.GET_SCHEMAS);
+ this.catalogName = catalogName;
+ this.schemaName = schemaName;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#run()
+ */
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
+ rowSet = new RowSet();
+ try {
+ IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
+ String schemaPattern = convertSchemaPattern(schemaName);
+ for (String dbName : metastoreClient.getDatabases(schemaPattern)) {
+ rowSet.addRow(RESULT_SET_SCHEMA, new Object[] {dbName, DEFAULT_HIVE_CATALOG});
+ }
+ setState(OperationState.FINISHED);
+ } catch (Exception e) {
+ setState(OperationState.ERROR);
+ throw new HiveSQLException(e);
+ }
+ }
+
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+ */
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return RESULT_SET_SCHEMA;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return rowSet.extractSubset((int)maxRows);
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
new file mode 100644
index 0000000..eaf867e
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetTableTypesOperation.
+ *
+ */
+public class GetTableTypesOperation extends MetadataOperation {
+
+ protected static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+ .addStringColumn("TABLE_TYPE", "Table type name.");
+
+ private RowSet rowSet;
+
+ protected GetTableTypesOperation(HiveSession parentSession) {
+ super(parentSession, OperationType.GET_TABLE_TYPES);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#run()
+ */
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
+ try {
+ rowSet = new RowSet();
+ for (TableType type : TableType.values()) {
+ rowSet.addRow(RESULT_SET_SCHEMA, new String[] {type.toString()});
+ }
+ setState(OperationState.FINISHED);
+ } catch (Exception e) {
+ setState(OperationState.ERROR);
+ throw new HiveSQLException(e);
+ }
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+ */
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return RESULT_SET_SCHEMA;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return rowSet.extractSubset((int)maxRows);
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
new file mode 100644
index 0000000..df8b5b3
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetTablesOperation.
+ *
+ */
+public class GetTablesOperation extends MetadataOperation {
+
+ private final String catalogName;
+ private final String schemaName;
+ private final String tableName;
+ private final List<String> tableTypes = new ArrayList<String>();
+ private final RowSet rowSet = new RowSet();
+
+
+ private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+ .addStringColumn("TABLE_CAT", "Catalog name. NULL if not applicable.")
+ .addStringColumn("TABLE_SCHEMA", "Schema name.")
+ .addStringColumn("TABLE_NAME", "Table name.")
+ .addStringColumn("TABLE_TYPE", "The table type, e.g. \"TABLE\", \"VIEW\", etc.")
+ .addStringColumn("REMARKS", "Comments about the table.");
+
+ protected GetTablesOperation(HiveSession parentSession,
+ String catalogName, String schemaName, String tableName,
+ List<String> tableTypes) {
+ super(parentSession, OperationType.GET_TABLES);
+ this.catalogName = catalogName;
+ this.schemaName = schemaName;
+ this.tableName = tableName;
+ if (tableTypes != null) {
+ this.tableTypes.addAll(tableTypes);
+ }
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#run()
+ */
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
+ try {
+ IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
+ String schemaPattern = convertSchemaPattern(schemaName);
+ String tablePattern = convertIdentifierPattern(tableName, true);
+ for (String dbName : metastoreClient.getDatabases(schemaPattern)) {
+ List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
+ for (Table table : metastoreClient.getTableObjectsByName(dbName, tableNames)) {
+ Object[] rowData = new Object[] {
+ DEFAULT_HIVE_CATALOG,
+ table.getDbName(),
+ table.getTableName(),
+ table.getTableType(),
+ table.getParameters().get("comment")
+ };
+ if (tableTypes.isEmpty() || tableTypes.contains(table.getTableType())) {
+ rowSet.addRow(RESULT_SET_SCHEMA, rowData);
+ }
+ }
+ }
+ setState(OperationState.FINISHED);
+ } catch (Exception e) {
+ setState(OperationState.ERROR);
+ throw new HiveSQLException(e);
+ }
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+ */
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return RESULT_SET_SCHEMA;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return rowSet.extractSubset((int)maxRows);
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
new file mode 100644
index 0000000..2daa9cd
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.Type;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetTypeInfoOperation.
+ *
+ */
+public class GetTypeInfoOperation extends MetadataOperation {
+
+ private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+ .addPrimitiveColumn("TYPE_NAME", Type.STRING_TYPE,
+ "Type name")
+ .addPrimitiveColumn("DATA_TYPE", Type.INT_TYPE,
+ "SQL data type from java.sql.Types")
+ .addPrimitiveColumn("PRECISION", Type.INT_TYPE,
+ "Maximum precision")
+ .addPrimitiveColumn("LITERAL_PREFIX", Type.STRING_TYPE,
+ "Prefix used to quote a literal (may be null)")
+ .addPrimitiveColumn("LITERAL_SUFFIX", Type.STRING_TYPE,
+ "Suffix used to quote a literal (may be null)")
+ .addPrimitiveColumn("CREATE_PARAMS", Type.STRING_TYPE,
+ "Parameters used in creating the type (may be null)")
+ .addPrimitiveColumn("NULLABLE", Type.SMALLINT_TYPE,
+ "Can you use NULL for this type")
+ .addPrimitiveColumn("CASE_SENSITIVE", Type.BOOLEAN_TYPE,
+ "Is it case sensitive")
+ .addPrimitiveColumn("SEARCHABLE", Type.SMALLINT_TYPE,
+ "Can you use \"WHERE\" based on this type")
+ .addPrimitiveColumn("UNSIGNED_ATTRIBUTE", Type.BOOLEAN_TYPE,
+ "Is it unsigned")
+ .addPrimitiveColumn("FIXED_PREC_SCALE", Type.BOOLEAN_TYPE,
+ "Can it be a money value")
+ .addPrimitiveColumn("AUTO_INCREMENT", Type.BOOLEAN_TYPE,
+ "Can it be used for an auto-increment value")
+ .addPrimitiveColumn("LOCAL_TYPE_NAME", Type.STRING_TYPE,
+ "Localized version of type name (may be null)")
+ .addPrimitiveColumn("MINIMUM_SCALE", Type.SMALLINT_TYPE,
+ "Minimum scale supported")
+ .addPrimitiveColumn("MAXIMUM_SCALE", Type.SMALLINT_TYPE,
+ "Maximum scale supported")
+ .addPrimitiveColumn("SQL_DATA_TYPE", Type.INT_TYPE,
+ "Unused")
+ .addPrimitiveColumn("SQL_DATETIME_SUB", Type.INT_TYPE,
+ "Unused")
+ .addPrimitiveColumn("NUM_PREC_RADIX", Type.INT_TYPE,
+ "Usually 2 or 10");
+
+ private final RowSet rowSet = new RowSet();
+
+ protected GetTypeInfoOperation(HiveSession parentSession) {
+ super(parentSession, OperationType.GET_TYPE_INFO);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#run()
+ */
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
+ try {
+ for (Type type : Type.values()) {
+ Object[] rowData = new Object[] {
+ type.getName(), // TYPE_NAME
+ type.toJavaSQLType(), // DATA_TYPE
+ type.getPrecision(), // PRECISION
+ type.getLiteralPrefix(), // LITERAL_PREFIX
+ type.getLiteralSuffix(), // LITERAL_SUFFIX
+ type.getCreateParams(), // CREATE_PARAMS
+ type.getNullable(), // NULLABLE
+ type.isCaseSensitive(), // CASE_SENSITIVE
+ type.getSearchable(), // SEARCHABLE
+ type.isUnsignedAttribute(), // UNSIGNED_ATTRIBUTE
+ type.isFixedPrecScale(), // FIXED_PREC_SCALE
+ type.isAutoIncrement(), // AUTO_INCREMENT
+ type.getLocalizedName(), // LOCAL_TYPE_NAME
+ type.getMinimumScale(), // MINIMUM_SCALE
+ type.getMaximumScale(), // MAXIMUM_SCALE
+ null, // SQL_DATA_TYPE, unused
+ null, // SQL_DATETIME_SUB, unused
+ type.getNumPrecRadix() //NUM_PREC_RADIX
+ };
+ rowSet.addRow(RESULT_SET_SCHEMA, rowData);
+ }
+ setState(OperationState.FINISHED);
+ } catch (Exception e) {
+ setState(OperationState.ERROR);
+ throw new HiveSQLException(e);
+ }
+ }
+
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+ */
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return RESULT_SET_SCHEMA;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ return rowSet.extractSubset((int)maxRows);
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
new file mode 100644
index 0000000..60148cb
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * HiveCommandOperation.
+ *
+ */
+public abstract class HiveCommandOperation extends ExecuteStatementOperation {
+ private CommandProcessorResponse response;
+ private CommandProcessor commandProcessor;
+ private TableSchema resultSchema = null;
+
+ /**
+ * For processors other than Hive queries (Driver), they output to session.out (a temp file)
+ * first and the fetchOne/fetchN/fetchAll functions get the output from pipeIn.
+ */
+ private BufferedReader resultReader;
+
+
+  protected HiveCommandOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay) {
+ super(parentSession, statement, confOverlay);
+ setupSessionIO(parentSession.getSessionState());
+ }
+
+ private void setupSessionIO(SessionState sessionState) {
+ try {
+ LOG.info("Putting temp output to file " + sessionState.getTmpOutputFile().toString());
+ sessionState.in = null; // hive server's session input stream is not used
+ // open a per-session file in auto-flush mode for writing temp results
+ sessionState.out = new PrintStream(new FileOutputStream(sessionState.getTmpOutputFile()), true, "UTF-8");
+ // TODO: for hadoop jobs, progress is printed out to session.err,
+ // we should find a way to feed back job progress to client
+ sessionState.err = new PrintStream(System.err, true, "UTF-8");
+ } catch (IOException e) {
+ LOG.error("Error in creating temp output file ", e);
+ try {
+ sessionState.in = null;
+ sessionState.out = new PrintStream(System.out, true, "UTF-8");
+ sessionState.err = new PrintStream(System.err, true, "UTF-8");
+ } catch (UnsupportedEncodingException ee) {
+ ee.printStackTrace();
+ sessionState.out = null;
+ sessionState.err = null;
+ }
+ }
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.operation.Operation#run()
+ */
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
+ try {
+ String command = getStatement().trim();
+      String[] tokens = command.split("\\s");
+ String commandArgs = command.substring(tokens[0].length()).trim();
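+      // e.g. for the statement "set -v", tokens[0] is "set" and commandArgs is "-v"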
+
+      response = getCommandProcessor().run(commandArgs);
+      int returnCode = response.getResponseCode();
+      if (returnCode != 0) {
+        throw new HiveSQLException("Error while processing statement: "
+            + response.getErrorMessage(), response.getSQLState(), returnCode);
+      }
+ Schema schema = response.getSchema();
+ if (schema != null) {
+ setHasResultSet(true);
+ resultSchema = new TableSchema(schema);
+ } else {
+ setHasResultSet(false);
+ resultSchema = new TableSchema();
+ }
+    } catch (HiveSQLException e) {
+      setState(OperationState.ERROR);
+      throw e;
+    } catch (Exception e) {
+      setState(OperationState.ERROR);
+      throw new HiveSQLException("Error running query: " + e.toString(), e);
+    }
+ setState(OperationState.FINISHED);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.operation.Operation#close()
+ */
+ @Override
+ public void close() throws HiveSQLException {
+ setState(OperationState.CLOSED);
+ cleanTmpFile();
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.operation.Operation#getResultSetSchema()
+ */
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ return resultSchema;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.operation.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+ */
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+    List<String> rows = readResults((int) maxRows);
+ RowSet rowSet = new RowSet();
+
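+    // each line of the temp output file becomes a single-column string row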
+ for (String row : rows) {
+ rowSet.addRow(resultSchema, new String[] {row});
+ }
+ return rowSet;
+ }
+
+  /**
+   * Reads the temporary results of non-Hive (non-Driver) commands from the
+   * session's temp output file.
+   * @param nLines number of lines to read at once; if it is <= 0, read all remaining lines
+   * @return the lines read, as a list of strings
+   */
+  private List<String> readResults(int nLines) throws HiveSQLException {
+ if (resultReader == null) {
+ SessionState sessionState = getParentSession().getSessionState();
+ File tmp = sessionState.getTmpOutputFile();
+ try {
+ resultReader = new BufferedReader(new FileReader(tmp));
+ } catch (FileNotFoundException e) {
+ LOG.error("File " + tmp + " not found. ", e);
+ throw new HiveSQLException(e);
+ }
+ }
+
+    List<String> results = new ArrayList<String>();
+
+ for (int i = 0; i < nLines || nLines <= 0; ++i) {
+ try {
+ String line = resultReader.readLine();
+ if (line == null) {
+ // reached the end of the result file
+ break;
+ } else {
+ results.add(line);
+ }
+ } catch (IOException e) {
+ LOG.error("Reading temp results encountered an exception: ", e);
+ throw new HiveSQLException(e);
+ }
+ }
+ return results;
+ }
+
+ private void cleanTmpFile() {
+ if (resultReader != null) {
+ SessionState sessionState = getParentSession().getSessionState();
+ File tmp = sessionState.getTmpOutputFile();
+ tmp.delete();
+ resultReader = null;
+ }
+ }
+
+ protected CommandProcessor getCommandProcessor() {
+ return commandProcessor;
+ }
+
+ protected void setCommandProcessor(CommandProcessor commandProcessor) {
+ this.commandProcessor = commandProcessor;
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
new file mode 100644
index 0000000..8dc82ab
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * MetadataOperation.
+ *
+ */
+public abstract class MetadataOperation extends Operation {
+
+ protected static final String DEFAULT_HIVE_CATALOG = "";
+ protected static TableSchema RESULT_SET_SCHEMA;
+ private static final char SEARCH_STRING_ESCAPE = '\\';
+
+ protected MetadataOperation(HiveSession parentSession, OperationType opType) {
+ super(parentSession, opType);
+ setHasResultSet(true);
+ }
+
+
+ /* (non-Javadoc)
+ * @see org.apache.hive.service.cli.Operation#close()
+ */
+ @Override
+ public void close() throws HiveSQLException {
+ setState(OperationState.CLOSED);
+ }
+
+  /**
+   * Convert wildcards and escape sequence from JDBC format to datanucleus/regex.
+   */
+ protected String convertIdentifierPattern(final String pattern, boolean datanucleusFormat) {
+ if (pattern == null) {
+ return convertPattern("%", true);
+ } else {
+ return convertPattern(pattern, datanucleusFormat);
+ }
+ }
+
+  /**
+   * Convert wildcards and escape sequence of a schema pattern from JDBC format to datanucleus/regex.
+   * The schema pattern also treats the empty string as a wildcard.
+   */
+ protected String convertSchemaPattern(final String pattern) {
+ if ((pattern == null) || pattern.isEmpty()) {
+ return convertPattern("%", true);
+ } else {
+ return convertPattern(pattern, true);
+ }
+ }
+
+  /**
+   * Convert a pattern containing JDBC catalog search wildcards into
+   * Java regex patterns.
+   *
+   * @param pattern input which may contain '%' or '_' wildcard characters, or
+   * these characters escaped using {@link #SEARCH_STRING_ESCAPE}.
+   * @return the pattern with %/_ replaced by regex search characters and
+   * escaped wildcards unescaped.
+   *
+   * The datanucleus module expects the wildcard as '*'. The column search, on the
+   * other hand, is done locally inside the Hive code, which requires the regex wildcard
+   * format '.*'. This is driven by the datanucleusFormat flag.
+   */
+ private String convertPattern(final String pattern, boolean datanucleusFormat) {
+ String wStr;
+ if (datanucleusFormat) {
+ wStr = "*";
+ } else {
+ wStr = ".*";
+ }
+ return pattern
+ .replaceAll("([^\\\\])%", "$1" + wStr).replaceAll("\\\\%", "%").replaceAll("^%", wStr)
+ .replaceAll("([^\\\\])_", "$1.").replaceAll("\\\\_", "_").replaceAll("^_", ".");
+ }
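+
+  // Illustrative examples of the conversion above (derived from the regexes):
+  //   convertPattern("db\\_name%", true)  yields "db_name*"  (datanucleus wildcard)
+  //   convertPattern("col_", false)       yields "col."      (regex wildcard)
+  //   convertPattern("%tab", true)        yields "*tab"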
+
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/Operation.java service/src/java/org/apache/hive/service/cli/operation/Operation.java
new file mode 100644
index 0000000..b354ac9
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.cli.operation;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+
+
+public abstract class Operation {
+ private final HiveSession parentSession;
+ private OperationState state = OperationState.INITIALIZED;
+ private final OperationHandle opHandle;
+ private HiveConf configuration;
+ public static final Log LOG = LogFactory.getLog(Operation.class.getName());
+ public static final long DEFAULT_FETCH_MAX_ROWS = 100;
+ protected boolean hasResultSet;
+
+ protected Operation(HiveSession parentSession, OperationType opType) {
+ super();
+ this.parentSession = parentSession;
+ opHandle = new OperationHandle(opType);
+ }
+
+ public void setConfiguration(HiveConf configuration) {
+ this.configuration = new HiveConf(configuration);
+ }
+
+ public HiveConf getConfiguration() {
+ return new HiveConf(configuration);
+ }
+
+ public HiveSession getParentSession() {
+ return parentSession;
+ }
+
+ public OperationHandle getHandle() {
+ return opHandle;
+ }
+
+ public OperationType getType() {
+ return opHandle.getOperationType();
+ }
+
+ public OperationState getState() {
+ return state;
+ }
+
+ public boolean hasResultSet() {
+ return hasResultSet;
+ }
+
+ protected void setHasResultSet(boolean hasResultSet) {
+ this.hasResultSet = hasResultSet;
+ opHandle.setHasResultSet(hasResultSet);
+ }
+
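+  /**
+   * Advance the operation through its lifecycle. The transition is checked by
+   * OperationState.validateTransition (broadly INITIALIZED -> RUNNING ->
+   * FINISHED/CANCELED/ERROR, with CLOSED as a terminal state), so an illegal
+   * jump surfaces as a HiveSQLException rather than a silent state change.
+   */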
+ protected final OperationState setState(OperationState newState) throws HiveSQLException {
+ state.validateTransition(newState);
+ this.state = newState;
+ return this.state;
+ }
+
+ protected final void assertState(OperationState state) throws HiveSQLException {
+ if (this.state != state) {
+ throw new HiveSQLException("Expected state " + state + ", but found " + this.state);
+ }
+ }
+
+ public boolean isRunning() {
+ return OperationState.RUNNING.equals(getState());
+ }
+
+ public boolean isFinished() {
+ return OperationState.FINISHED.equals(getState());
+ }
+
+ public boolean isCanceled() {
+ return OperationState.CANCELED.equals(getState());
+ }
+
+ public boolean isFailed() {
+ return OperationState.ERROR.equals(getState());
+ }
+
+ public abstract void run() throws HiveSQLException;
+
+ // TODO: make this abstract and implement in subclasses.
+ public void cancel() throws HiveSQLException {
+ setState(OperationState.CANCELED);
+    throw new UnsupportedOperationException("Operation.cancel()");
+ }
+
+ public abstract void close() throws HiveSQLException;
+
+ public abstract TableSchema getResultSetSchema() throws HiveSQLException;
+
+ public abstract RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException;
+
+ public RowSet getNextRowSet() throws HiveSQLException {
+ return getNextRowSet(FetchOrientation.FETCH_NEXT, DEFAULT_FETCH_MAX_ROWS);
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/OperationManager.java service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
new file mode 100644
index 0000000..e5c763d
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.AbstractService;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * OperationManager.
+ *
+ */
+public class OperationManager extends AbstractService {
+
+ private HiveConf hiveConf;
+  private final Map<OperationHandle, Operation> handleToOperation =
+      new HashMap<OperationHandle, Operation>();
+
+ public OperationManager() {
+ super("OperationManager");
+ }
+
+ @Override
+ public synchronized void init(HiveConf hiveConf) {
+ this.hiveConf = hiveConf;
+
+ super.init(hiveConf);
+ }
+
+ @Override
+ public synchronized void start() {
+ super.start();
+ // TODO
+ }
+
+ @Override
+ public synchronized void stop() {
+ // TODO
+ super.stop();
+ }
+
+  public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession,
+      String statement, Map<String, String> confOverlay) {
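+    // The factory is expected to pick a command-specific operation (e.g. a
+    // SetOperation for "set ...") or a SQLOperation for regular queries; see
+    // ExecuteStatementOperation.newExecuteStatementOperation.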
+ ExecuteStatementOperation executeStatementOperation = ExecuteStatementOperation
+ .newExecuteStatementOperation(parentSession, statement, confOverlay);
+ addOperation(executeStatementOperation);
+ return executeStatementOperation;
+ }
+
+ public GetTypeInfoOperation newGetTypeInfoOperation(HiveSession parentSession) {
+ GetTypeInfoOperation operation = new GetTypeInfoOperation(parentSession);
+ addOperation(operation);
+ return operation;
+ }
+
+ public GetCatalogsOperation newGetCatalogsOperation(HiveSession parentSession) {
+ GetCatalogsOperation operation = new GetCatalogsOperation(parentSession);
+ addOperation(operation);
+ return operation;
+ }
+
+ public GetSchemasOperation newGetSchemasOperation(HiveSession parentSession,
+ String catalogName, String schemaName) {
+ GetSchemasOperation operation = new GetSchemasOperation(parentSession, catalogName, schemaName);
+ addOperation(operation);
+ return operation;
+ }
+
+ public MetadataOperation newGetTablesOperation(HiveSession parentSession,
+ String catalogName, String schemaName, String tableName,
+      List<String> tableTypes) {
+ MetadataOperation operation =
+ new GetTablesOperation(parentSession, catalogName, schemaName, tableName, tableTypes);
+ addOperation(operation);
+ return operation;
+ }
+
+ public GetTableTypesOperation newGetTableTypesOperation(HiveSession parentSession) {
+ GetTableTypesOperation operation = new GetTableTypesOperation(parentSession);
+ addOperation(operation);
+ return operation;
+ }
+
+ public GetColumnsOperation newGetColumnsOperation(HiveSession parentSession,
+ String catalogName, String schemaName, String tableName, String columnName) {
+ GetColumnsOperation operation = new GetColumnsOperation(parentSession,
+ catalogName, schemaName, tableName, columnName);
+ addOperation(operation);
+ return operation;
+ }
+
+ public GetFunctionsOperation newGetFunctionsOperation(HiveSession parentSession,
+ String catalogName, String schemaName, String functionName) {
+ GetFunctionsOperation operation = new GetFunctionsOperation(parentSession,
+ catalogName, schemaName, functionName);
+ addOperation(operation);
+ return operation;
+ }
+
+ public synchronized Operation getOperation(OperationHandle operationHandle) throws HiveSQLException {
+ Operation operation = handleToOperation.get(operationHandle);
+ if (operation == null) {
+ throw new HiveSQLException("Invalid OperationHandle: " + operationHandle);
+ }
+ return operation;
+ }
+
+ private synchronized void addOperation(Operation operation) {
+ handleToOperation.put(operation.getHandle(), operation);
+ }
+
+ private synchronized Operation removeOperation(OperationHandle opHandle) {
+ return handleToOperation.remove(opHandle);
+ }
+
+ public OperationState getOperationState(OperationHandle opHandle) throws HiveSQLException {
+ return getOperation(opHandle).getState();
+ }
+
+ public void cancelOperation(OperationHandle opHandle) throws HiveSQLException {
+ getOperation(opHandle).cancel();
+ }
+
+ public void closeOperation(OperationHandle opHandle) throws HiveSQLException {
+ Operation operation = removeOperation(opHandle);
+ if (operation == null) {
+ throw new HiveSQLException("Operation does not exist!");
+ }
+ operation.close();
+ }
+
+ public TableSchema getOperationResultSetSchema(OperationHandle opHandle)
+ throws HiveSQLException {
+ return getOperation(opHandle).getResultSetSchema();
+ }
+
+ public RowSet getOperationNextRowSet(OperationHandle opHandle) throws HiveSQLException {
+ return getOperation(opHandle).getNextRowSet();
+ }
+
+ public RowSet getOperationNextRowSet(OperationHandle opHandle,
+ FetchOrientation orientation, long maxRows)
+ throws HiveSQLException {
+ return getOperation(opHandle).getNextRowSet(orientation, maxRows);
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
new file mode 100644
index 0000000..405f0c7
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -0,0 +1,256 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * SQLOperation.
+ *
+ */
+public class SQLOperation extends ExecuteStatementOperation {
+
+ private Driver driver = null;
+ private CommandProcessorResponse response;
+ private TableSchema resultSchema = null;
+ private Schema mResultSchema = null;
+ private SerDe serde = null;
+
+
+  public SQLOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay) {
+ // TODO: call setRemoteUser in ExecuteStatementOperation or higher.
+ super(parentSession, statement, confOverlay);
+ }
+
+
+ public void prepare() throws HiveSQLException {
+ }
+
+ @Override
+ public void run() throws HiveSQLException {
+ setState(OperationState.RUNNING);
+
+ try {
+ driver = new Driver(getParentSession().getHiveConf());
+ // In Hive server mode, we are not able to retry in the FetchTask
+ // case, when calling fetch queries since execute() has returned.
+      // For now, we disable the retry attempts.
+ driver.setTryCount(Integer.MAX_VALUE);
+
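+      // VariableSubstitution expands ${...}-style references in the statement
+      // (e.g. hiveconf:, system:, env: variables) before it is compiled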
+ String subStatement = new VariableSubstitution().substitute(getParentSession().getHiveConf(), statement);
+
+ response = driver.run(subStatement);
+ if (0 != response.getResponseCode()) {
+ throw new HiveSQLException("Error while processing statement: "
+ + response.getErrorMessage(), response.getSQLState(), response.getResponseCode());
+ }
+
+ mResultSchema = driver.getSchema();
+ if (mResultSchema != null && mResultSchema.isSetFieldSchemas()) {
+ resultSchema = new TableSchema(mResultSchema);
+ setHasResultSet(true);
+ } else {
+ setHasResultSet(false);
+ }
+ } catch (HiveSQLException e) {
+ setState(OperationState.ERROR);
+ throw e;
+ } catch (Exception e) {
+ setState(OperationState.ERROR);
+ throw new HiveSQLException("Error running query: " + e.toString());
+ }
+ setState(OperationState.FINISHED);
+ }
+
+ @Override
+ public void cancel() throws HiveSQLException {
+ setState(OperationState.CANCELED);
+ if (driver != null) {
+ driver.close();
+ driver.destroy();
+ }
+
+ SessionState session = SessionState.get();
+ if (session.getTmpOutputFile() != null) {
+ session.getTmpOutputFile().delete();
+ }
+ }
+
+ @Override
+ public void close() throws HiveSQLException {
+ setState(OperationState.CLOSED);
+ if (driver != null) {
+ driver.close();
+ driver.destroy();
+ }
+
+ SessionState session = SessionState.get();
+ if (session.getTmpOutputFile() != null) {
+ session.getTmpOutputFile().delete();
+ }
+ }
+
+ @Override
+ public TableSchema getResultSetSchema() throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+ if (resultSchema == null) {
+ resultSchema = new TableSchema(driver.getSchema());
+ }
+ return resultSchema;
+ }
+
+
+ @Override
+ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+ assertState(OperationState.FINISHED);
+    ArrayList<String> rows = new ArrayList<String>();
+ driver.setMaxRows((int)maxRows);
+
+ try {
+ driver.getResults(rows);
+
+ getSerDe();
+ StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
+      List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
+ RowSet rowSet = new RowSet();
+
+ Object[] deserializedFields = new Object[fieldRefs.size()];
+ Object rowObj;
+ ObjectInspector fieldOI;
+
+ for (String rowString : rows) {
+ rowObj = serde.deserialize(new BytesWritable(rowString.getBytes()));
+ for (int i = 0; i < fieldRefs.size(); i++) {
+ StructField fieldRef = fieldRefs.get(i);
+ fieldOI = fieldRef.getFieldObjectInspector();
+ deserializedFields[i] = convertLazyToJava(soi.getStructFieldData(rowObj, fieldRef), fieldOI);
+ }
+ rowSet.addRow(resultSchema, deserializedFields);
+ }
+ return rowSet;
+ } catch (IOException e) {
+ throw new HiveSQLException(e);
+ } catch (CommandNeedRetryException e) {
+ throw new HiveSQLException(e);
+ } catch (Exception e) {
+ throw new HiveSQLException(e);
+ }
+ }
+
+ /**
+ * Convert a LazyObject to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0
+ * Specification, Table B-3: Mapping from JDBC Types to Java Object Types).
+ *
+ * This method is kept consistent with {@link HiveResultSetMetaData#hiveTypeToSqlType}.
+ */
+ private static Object convertLazyToJava(Object o, ObjectInspector oi) {
+ Object obj = ObjectInspectorUtils.copyToStandardObject(o, oi, ObjectInspectorCopyOption.JAVA);
+
+ // for now, expose non-primitive as a string
+ // TODO: expose non-primitive as a structured object while maintaining JDBC compliance
+ if (obj != null && oi.getCategory() != ObjectInspector.Category.PRIMITIVE) {
+ obj = obj.toString();
+ }
+
+ return obj;
+ }
+
+
+ private SerDe getSerDe() throws SQLException {
+ if (serde != null) {
+ return serde;
+ }
+ try {
+      List<FieldSchema> fieldSchemas = mResultSchema.getFieldSchemas();
+      List<String> columnNames = new ArrayList<String>();
+      List<String> columnTypes = new ArrayList<String>();
+ StringBuilder namesSb = new StringBuilder();
+ StringBuilder typesSb = new StringBuilder();
+
+ if (fieldSchemas != null && !fieldSchemas.isEmpty()) {
+ for (int pos = 0; pos < fieldSchemas.size(); pos++) {
+ if (pos != 0) {
+ namesSb.append(",");
+ typesSb.append(",");
+ }
+ columnNames.add(fieldSchemas.get(pos).getName());
+ columnTypes.add(fieldSchemas.get(pos).getType());
+ namesSb.append(fieldSchemas.get(pos).getName());
+ typesSb.append(fieldSchemas.get(pos).getType());
+ }
+ }
+ String names = namesSb.toString();
+ String types = typesSb.toString();
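+      // e.g. a result schema of (key int, value string) yields
+      // names = "key,value" and types = "int,string"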
+
+ serde = new LazySimpleSerDe();
+ Properties props = new Properties();
+ if (names.length() > 0) {
+ LOG.debug("Column names: " + names);
+ props.setProperty(serdeConstants.LIST_COLUMNS, names);
+ }
+ if (types.length() > 0) {
+ LOG.debug("Column types: " + types);
+ props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types);
+ }
+ serde.initialize(new HiveConf(), props);
+
+    } catch (Exception ex) {
+      throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
+ }
+ return serde;
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/cli/operation/SetOperation.java service/src/java/org/apache/hive/service/cli/operation/SetOperation.java
new file mode 100644
index 0000000..bf6969a
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/operation/SetOperation.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.processors.SetProcessor;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * SetOperation.
+ *
+ */
+public class SetOperation extends HiveCommandOperation {
+
+  protected SetOperation(HiveSession parentSession, String statement,
+      Map<String, String> confOverlay) {
+ super(parentSession, statement, confOverlay);
+ setCommandProcessor(new SetProcessor());
+ }
+
+}
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSession.java service/src/java/org/apache/hive/service/cli/session/HiveSession.java
new file mode 100644
index 0000000..5fa8fa1
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/session/HiveSession.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.session;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.GetInfoType;
+import org.apache.hive.service.cli.GetInfoValue;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.operation.OperationManager;
+
+public interface HiveSession {
+ /**
+ * Set the session manager for the session
+ * @param sessionManager
+ */
+ public void setSessionManager(SessionManager sessionManager);
+
+ /**
+ * Set operation manager for the session
+ * @param operationManager
+ */
+ public void setOperationManager(OperationManager operationManager);
+
+ public SessionHandle getSessionHandle();
+
+ public String getUsername();
+
+ public String getPassword();
+
+ public HiveConf getHiveConf();
+
+ public IMetaStoreClient getMetaStoreClient() throws HiveSQLException;
+
+ /**
+ * getInfo operation handler
+ * @param getInfoType
+ * @return
+ * @throws HiveSQLException
+ */
+ public GetInfoValue getInfo(GetInfoType getInfoType) throws HiveSQLException;
+
+ /**
+ * execute operation handler
+ * @param statement
+ * @param confOverlay
+ * @return
+ * @throws HiveSQLException
+ */
+  public OperationHandle executeStatement(String statement,
+      Map<String, String> confOverlay) throws HiveSQLException;
+
+ /**
+ * getTypeInfo operation handler
+ * @return
+ * @throws HiveSQLException
+ */
+ public OperationHandle getTypeInfo() throws HiveSQLException;
+
+ /**
+ * getCatalogs operation handler
+ * @return
+ * @throws HiveSQLException
+ */
+ public OperationHandle getCatalogs() throws HiveSQLException;
+
+ /**
+ * getSchemas operation handler
+ * @param catalogName
+ * @param schemaName
+ * @return
+ * @throws HiveSQLException
+ */
+ public OperationHandle getSchemas(String catalogName, String schemaName)
+ throws HiveSQLException;
+
+ /**
+ * getTables operation handler
+ * @param catalogName
+ * @param schemaName
+ * @param tableName
+ * @param tableTypes
+ * @return
+ * @throws HiveSQLException
+ */
+ public OperationHandle getTables(String catalogName, String schemaName,
+      String tableName, List<String> tableTypes) throws HiveSQLException;
+
+ /**
+ * getTableTypes operation handler
+ * @return
+ * @throws HiveSQLException
+ */
+  public OperationHandle getTableTypes() throws HiveSQLException;
+
+ /**
+ * getColumns operation handler
+ * @param catalogName
+ * @param schemaName
+ * @param tableName
+ * @param columnName
+ * @return
+ * @throws HiveSQLException
+ */
+ public OperationHandle getColumns(String catalogName, String schemaName,
+ String tableName, String columnName) throws HiveSQLException;
+
+ /**
+ * getFunctions operation handler
+ * @param catalogName
+ * @param schemaName
+ * @param functionName
+ * @return
+ * @throws HiveSQLException
+ */
+ public OperationHandle getFunctions(String catalogName, String schemaName,
+ String functionName) throws HiveSQLException;
+
+ /**
+ * close the session
+ * @throws HiveSQLException
+ */
+ public void close() throws HiveSQLException;
+
+ public void cancelOperation(OperationHandle opHandle) throws HiveSQLException;
+
+ public void closeOperation(OperationHandle opHandle) throws HiveSQLException;
+
+ public TableSchema getResultSetMetadata(OperationHandle opHandle)
+ throws HiveSQLException;
+
+ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
+ throws HiveSQLException;
+
+ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException;
+
+ public SessionState getSessionState();
+
+ public String getUserName();
+
+ public void setUserName(String userName);
+}
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
new file mode 100644
index 0000000..18594cb
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -0,0 +1,344 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.session;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.GetInfoType;
+import org.apache.hive.service.cli.GetInfoValue;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.operation.ExecuteStatementOperation;
+import org.apache.hive.service.cli.operation.GetCatalogsOperation;
+import org.apache.hive.service.cli.operation.GetColumnsOperation;
+import org.apache.hive.service.cli.operation.GetFunctionsOperation;
+import org.apache.hive.service.cli.operation.GetSchemasOperation;
+import org.apache.hive.service.cli.operation.GetTableTypesOperation;
+import org.apache.hive.service.cli.operation.GetTypeInfoOperation;
+import org.apache.hive.service.cli.operation.MetadataOperation;
+import org.apache.hive.service.cli.operation.OperationManager;
+
+/**
+ * HiveSessionImpl.
+ *
+ */
+public class HiveSessionImpl implements HiveSession {
+
+ private final SessionHandle sessionHandle = new SessionHandle();
+ private String username;
+ private final String password;
+  private final Map<String, String> sessionConf = new HashMap<String, String>();
+ private final HiveConf hiveConf = new HiveConf();
+ private final SessionState sessionState;
+
+ private static final String FETCH_WORK_SERDE_CLASS =
+ "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe";
+
+ private SessionManager sessionManager;
+ private OperationManager operationManager;
+ private IMetaStoreClient metastoreClient = null;
+
+  public HiveSessionImpl(String username, String password, Map<String, String> sessionConf) {
+ this.username = username;
+ this.password = password;
+
+ if (sessionConf != null) {
+      for (Map.Entry<String, String> entry : sessionConf.entrySet()) {
+ hiveConf.set(entry.getKey(), entry.getValue());
+ }
+ }
+
+ sessionState = new SessionState(hiveConf);
+ }
+
+ private SessionManager getSessionManager() {
+ return sessionManager;
+ }
+
+ public void setSessionManager(SessionManager sessionManager) {
+ this.sessionManager = sessionManager;
+ }
+
+ private OperationManager getOperationManager() {
+ return operationManager;
+ }
+
+ public void setOperationManager(OperationManager operationManager) {
+ this.operationManager = operationManager;
+ }
+
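+  /**
+   * Attach this session's SessionState to the current thread before an
+   * operation runs; SessionState is kept in a thread-local, so this is what
+   * makes the session's configuration visible to the executing code.
+   */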
+ protected synchronized void acquire() throws HiveSQLException {
+ SessionState.start(sessionState);
+ }
+
+ protected synchronized void release() {
+ assert sessionState != null;
+ // no need to release sessionState...
+ }
+
+ public SessionHandle getSessionHandle() {
+ return sessionHandle;
+ }
+
+ public String getUsername() {
+ return username;
+ }
+
+ public String getPassword() {
+ return password;
+ }
+
+ public HiveConf getHiveConf() {
+ hiveConf.setVar(HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE, FETCH_WORK_SERDE_CLASS);
+ return hiveConf;
+ }
+
+ public IMetaStoreClient getMetaStoreClient() throws HiveSQLException {
+ if (metastoreClient == null) {
+ try {
+ metastoreClient = new HiveMetaStoreClient(getHiveConf());
+ } catch (MetaException e) {
+ throw new HiveSQLException(e);
+ }
+ }
+ return metastoreClient;
+ }
+
+ public GetInfoValue getInfo(GetInfoType getInfoType)
+ throws HiveSQLException {
+ acquire();
+ try {
+ switch (getInfoType) {
+ case CLI_SERVER_NAME:
+ return new GetInfoValue("Hive");
+ case CLI_DBMS_NAME:
+ return new GetInfoValue("Apache Hive");
+ case CLI_DBMS_VER:
+ return new GetInfoValue("0.10.0");
+ case CLI_MAX_COLUMN_NAME_LEN:
+ return new GetInfoValue(128);
+ case CLI_MAX_SCHEMA_NAME_LEN:
+ return new GetInfoValue(128);
+ case CLI_MAX_TABLE_NAME_LEN:
+ return new GetInfoValue(128);
+ case CLI_TXN_CAPABLE:
+ default:
+ throw new HiveSQLException("Unrecognized GetInfoType value: " + getInfoType.toString());
+ }
+ } finally {
+ release();
+ }
+ }
+
+  public OperationHandle executeStatement(String statement, Map<String, String> confOverlay)
+ throws HiveSQLException {
+ acquire();
+ try {
+ ExecuteStatementOperation operation = getOperationManager()
+ .newExecuteStatementOperation(getSession(), statement, confOverlay);
+ operation.run();
+ return operation.getHandle();
+ } finally {
+ release();
+ }
+ }
+
+ public OperationHandle getTypeInfo()
+ throws HiveSQLException {
+ acquire();
+ try {
+ GetTypeInfoOperation operation = getOperationManager().newGetTypeInfoOperation(getSession());
+ operation.run();
+ return operation.getHandle();
+ } finally {
+ release();
+ }
+ }
+
+ public OperationHandle getCatalogs()
+ throws HiveSQLException {
+ acquire();
+ try {
+ GetCatalogsOperation operation = getOperationManager().newGetCatalogsOperation(getSession());
+ operation.run();
+ return operation.getHandle();
+ } finally {
+ release();
+ }
+ }
+
+ public OperationHandle getSchemas(String catalogName, String schemaName)
+ throws HiveSQLException {
+ acquire();
+ try {
+ GetSchemasOperation operation =
+ getOperationManager().newGetSchemasOperation(getSession(), catalogName, schemaName);
+ operation.run();
+ return operation.getHandle();
+ } finally {
+ release();
+ }
+ }
+
+ public OperationHandle getTables(String catalogName, String schemaName, String tableName,
+      List<String> tableTypes)
+ throws HiveSQLException {
+ acquire();
+ try {
+ MetadataOperation operation =
+ getOperationManager().newGetTablesOperation(getSession(), catalogName, schemaName, tableName, tableTypes);
+ operation.run();
+ return operation.getHandle();
+ } finally {
+ release();
+ }
+ }
+
+ public OperationHandle getTableTypes()
+ throws HiveSQLException {
+ acquire();
+ try {
+ GetTableTypesOperation operation = getOperationManager().newGetTableTypesOperation(getSession());
+ operation.run();
+ return operation.getHandle();
+ } finally {
+ release();
+ }
+ }
+
+ public OperationHandle getColumns(String catalogName, String schemaName,
+ String tableName, String columnName) throws HiveSQLException {
+ acquire();
+ try {
+ GetColumnsOperation operation = getOperationManager().newGetColumnsOperation(getSession(),
+ catalogName, schemaName, tableName, columnName);
+ operation.run();
+ return operation.getHandle();
+ } finally {
+ release();
+ }
+ }
+
+ public OperationHandle getFunctions(String catalogName, String schemaName, String functionName)
+ throws HiveSQLException {
+ acquire();
+ try {
+ GetFunctionsOperation operation = getOperationManager()
+ .newGetFunctionsOperation(getSession(), catalogName, schemaName, functionName);
+ operation.run();
+ return operation.getHandle();
+ } finally {
+ release();
+ }
+ }
+
+ public void close() throws HiveSQLException {
+ try {
+ acquire();
+ /**
+ * For metadata operations like getTables(), getColumns() etc,
+ * the session allocates a private metastore handler which should be
+ * closed at the end of the session
+ */
+ if (metastoreClient != null) {
+ metastoreClient.close();
+ }
+ } finally {
+ release();
+ }
+ }
+
+ public SessionState getSessionState() {
+ return sessionState;
+ }
+
+ public String getUserName() {
+ return username;
+ }
+
+  public void setUserName(String userName) {
+ this.username = userName;
+ }
+
+ @Override
+ public void cancelOperation(OperationHandle opHandle) throws HiveSQLException {
+ acquire();
+ try {
+ sessionManager.getOperationManager().cancelOperation(opHandle);
+ } finally {
+ release();
+ }
+ }
+
+ @Override
+ public void closeOperation(OperationHandle opHandle) throws HiveSQLException {
+ acquire();
+ try {
+ sessionManager.getOperationManager().closeOperation(opHandle);
+ } finally {
+ release();
+ }
+ }
+
+ @Override
+ public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
+ acquire();
+ try {
+ return sessionManager.getOperationManager().getOperationResultSetSchema(opHandle);
+ } finally {
+ release();
+ }
+ }
+
+ @Override
+ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
+ throws HiveSQLException {
+ acquire();
+ try {
+ return sessionManager.getOperationManager()
+ .getOperationNextRowSet(opHandle, orientation, maxRows);
+ } finally {
+ release();
+ }
+ }
+
+ @Override
+ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
+ acquire();
+ try {
+ return sessionManager.getOperationManager().getOperationNextRowSet(opHandle);
+ } finally {
+ release();
+ }
+ }
+
+ protected HiveSession getSession() {
+ return this;
+ }
+}
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
new file mode 100644
index 0000000..ae7bb6b
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.session;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.service.cli.HiveSQLException;
+
+/**
+ *
+ * HiveSessionImplwithUGI.
+ * HiveSession with connecting user's UGI and delegation token if required
+ */
+public class HiveSessionImplwithUGI extends HiveSessionImpl {
+ public static final String HS2TOKEN = "HiveServer2ImpersonationToken";
+
+ private UserGroupInformation sessionUgi = null;
+ private String delegationTokenStr = null;
+ private Hive sessionHive = null;
+ private HiveSession proxySession = null;
+
+  public HiveSessionImplwithUGI(String username, String password, Map<String, String> sessionConf,
+ String delegationToken) throws HiveSQLException {
+ super(username, password, sessionConf);
+ setSessionUGI(username);
+ setUserPath(username);
+ setDelegationToken(delegationToken);
+ }
+
+ // setup appropriate UGI for the session
+ public void setSessionUGI(String owner) throws HiveSQLException {
+ if (owner == null) {
+ throw new HiveSQLException("No username provided for impersonation");
+ }
+ if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+ try {
+ sessionUgi = ShimLoader.getHadoopShims().createProxyUser(owner);
+ } catch (IOException e) {
+ throw new HiveSQLException("Couldn't setup proxy user", e);
+ }
+ } else {
+ sessionUgi = ShimLoader.getHadoopShims().createRemoteUser(owner, null);
+ }
+ }
+
+ public UserGroupInformation getSessionUgi() {
+ return this.sessionUgi;
+ }
+
+  public String getDelegationToken() {
+ return this.delegationTokenStr;
+ }
+
+ @Override
+ protected synchronized void acquire() throws HiveSQLException {
+ super.acquire();
+ // if we have a metastore connection with impersonation, then set it first
+ if (sessionHive != null) {
+ Hive.set(sessionHive);
+ }
+ }
+
+ /**
+ * close the file systems for the session
+ * cancel the session's delegation token and close the metastore connection
+ */
+ @Override
+ public void close() throws HiveSQLException {
+ try {
+ acquire();
+ ShimLoader.getHadoopShims().closeAllForUGI(sessionUgi);
+ cancelDelegationToken();
+ } finally {
+ release();
+ super.close();
+ }
+ }
+
+ /**
+ * Enable delegation token for the session
+   * save the token string and set the token.signature in the hive conf. The metastore client uses
+   * this token.signature to determine whether to use kerberos or the delegation token
+ * @throws HiveException
+ * @throws IOException
+ */
+ private void setDelegationToken(String delegationTokenStr) throws HiveSQLException {
+ this.delegationTokenStr = delegationTokenStr;
+ if (delegationTokenStr != null) {
+ getHiveConf().set("hive.metastore.token.signature", HS2TOKEN);
+ try {
+ ShimLoader.getHadoopShims().setTokenStr(sessionUgi, delegationTokenStr, HS2TOKEN);
+ } catch (IOException e) {
+ throw new HiveSQLException("Couldn't setup delegation token in the ugi", e);
+ }
+ // create a new metastore connection using the delegation token
+ Hive.set(null);
+ try {
+ sessionHive = Hive.get(getHiveConf());
+ } catch (HiveException e) {
+ throw new HiveSQLException("Failed to setup metastore connection", e);
+ }
+ }
+ }
+
+ // If the session has a delegation token obtained from the metastore, then cancel it
+ private void cancelDelegationToken() throws HiveSQLException {
+ if (delegationTokenStr != null) {
+ try {
+ Hive.get(getHiveConf()).cancelDelegationToken(delegationTokenStr);
+ } catch (HiveException e) {
+ throw new HiveSQLException("Couldn't cancel delegation token", e);
+ }
+ // close the metastore connection created with this delegation token
+ Hive.closeCurrent();
+ }
+ }
+
+ // Append the user name to temp/scratch directory path for each impersonated user
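+  // (e.g. a scratch dir of "/tmp/hive" would become "/tmp/hive-bob" for user "bob")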
+ private void setUserPath(String userName) {
+ for (HiveConf.ConfVars var: HiveConf.userVars) {
+ String userVar = getHiveConf().getVar(var);
+ if (userVar != null) {
+        // If there's a path separator at the end then remove it
+        if (userVar.endsWith(File.separator)) {
+          userVar = userVar.substring(0, userVar.length() - 1);
+ }
+ getHiveConf().setVar(var, userVar + "-" + userName);
+ }
+ }
+ }
+
+ @Override
+ protected HiveSession getSession() {
+ assert proxySession != null;
+
+ return proxySession;
+ }
+
+ public void setProxySession(HiveSession proxySession) {
+ this.proxySession = proxySession;
+ }
+
+
+}
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java
new file mode 100644
index 0000000..76f18a9
--- /dev/null
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.session;
+
+/**
+ * Proxy wrapper on HiveSession to execute operations
+ * by impersonating given user
+ */
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.service.cli.HiveSQLException;
+
+public class HiveSessionProxy implements InvocationHandler {
+ private final HiveSession base;
+ private final UserGroupInformation ugi;
+
+ public HiveSessionProxy(HiveSession hiveSession, UserGroupInformation ugi) {
+ this.base = hiveSession;
+ this.ugi = ugi;
+ }
+
+ public static HiveSession getProxy(HiveSession hiveSession, UserGroupInformation ugi)
+ throws IllegalArgumentException, HiveSQLException {
+ return (HiveSession)Proxy.newProxyInstance(HiveSession.class.getClassLoader(),
+        new Class<?>[] {HiveSession.class},
+ new HiveSessionProxy(hiveSession, ugi));
+ }
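+
+  // Typical usage (a sketch; the names come from this patch): wrap the
+  // UGI-backed session so that every HiveSession call runs inside ugi.doAs():
+  //   HiveSession proxy = HiveSessionProxy.getProxy(sessionWithUGI, sessionUgi);
+  //   sessionWithUGI.setProxySession(proxy);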
+
+ @Override
+ public Object invoke(Object arg0, final Method method, final Object[] args)
+ throws Throwable {
+ try {
+ return ShimLoader.getHadoopShims().doAs(ugi,
+ new PrivilegedExceptionAction