diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 062e520..97e8677 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -568,7 +568,7 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) {
NEWTABLEDEFAULTPARA("hive.table.parameters.default", "",
"Default property values for newly created tables"),
- DDL_CTL_PARAMETERS_WHITELIST("hive.ddl.createtablelike.properties.whitelist", "",
+ DDL_CTL_PARAMETERS_WHITELIST("hive.ddl.createtablelike.properties.whitelist", "storage_handler,storage_resources",
"Table Properties to copy over when executing a Create Table Like."),
METASTORE_RAW_STORE_IMPL("hive.metastore.rawstore.impl", "org.apache.hadoop.hive.metastore.ObjectStore",
"Name of the class that implements org.apache.hadoop.hive.metastore.rawstore interface. \n" +
diff --git a/itests/test-serde/pom.xml b/itests/test-serde/pom.xml
index cb79072..2d6eb02 100644
--- a/itests/test-serde/pom.xml
+++ b/itests/test-serde/pom.xml
@@ -40,6 +40,12 @@
      <version>${project.version}</version>
      <optional>true</optional>
    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-contrib</artifactId>
+      <version>${project.version}</version>
+      <optional>true</optional>
+    </dependency>
@@ -52,6 +58,12 @@
      <version>${hadoop-20S.version}</version>
      <optional>true</optional>
    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop-20S.version}</version>
+      <optional>true</optional>
+    </dependency>
@@ -63,6 +75,12 @@
      <version>${hadoop-23.version}</version>
      <optional>true</optional>
    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop-23.version}</version>
+      <optional>true</optional>
+    </dependency>
diff --git a/itests/test-serde/src/main/java/org/apache/hadoop/hive/storagehandler/TestBase64TextOutputFormat.java b/itests/test-serde/src/main/java/org/apache/hadoop/hive/storagehandler/TestBase64TextOutputFormat.java
new file mode 100644
index 0000000..90ab48a
--- /dev/null
+++ b/itests/test-serde/src/main/java/org/apache/hadoop/hive/storagehandler/TestBase64TextOutputFormat.java
@@ -0,0 +1,36 @@
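+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+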
+package org.apache.hadoop.hive.storagehandler;
+
+import java.io.IOException;
+import java.util.Properties;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.Progressable;
+
+/**
+ * Output format that base64-encodes rows by wrapping the regular text record
+ * writer in the contrib Base64RecordWriter; paired with TestStorageHandler.
+ */
+public class TestBase64TextOutputFormat
+    extends HiveIgnoreKeyTextOutputFormat<WritableComparable, Writable> {
+  @Override
+  public FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath,
+      Class<? extends Writable> valueClass,
+      boolean isCompressed,
+      Properties tableProperties,
+      Progressable progress)
+      throws IOException {
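+    // Write to a single fixed file under the final path and wrap the regular text
+    // writer so every row is base64-encoded on the way out.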
+ Path path = new Path(finalOutPath, "0000_0");
+ FileOutputFormat.setOutputPath(jc, path);
+ Base64TextOutputFormat.Base64RecordWriter writer =
+ new Base64TextOutputFormat.Base64RecordWriter(super
+ .getHiveRecordWriter(jc, path, BytesWritable.class, isCompressed, tableProperties,
+ progress));
+ writer.configure(jc);
+ return writer;
+ }
+}
diff --git a/itests/test-serde/src/main/java/org/apache/hadoop/hive/storagehandler/TestStorageHandler.java b/itests/test-serde/src/main/java/org/apache/hadoop/hive/storagehandler/TestStorageHandler.java
new file mode 100644
index 0000000..80aae69
--- /dev/null
+++ b/itests/test-serde/src/main/java/org/apache/hadoop/hive/storagehandler/TestStorageHandler.java
@@ -0,0 +1,24 @@
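+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+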
+package org.apache.hadoop.hive.storagehandler;
+
+
+import org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat;
+import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.OutputFormat;
+
+/**
+ * Storage handler used by the qtests to exercise loading a handler from an
+ * external jar (STORED BY ... USING JAR); it pairs the contrib base64 input
+ * format with TestBase64TextOutputFormat.
+ */
+public class TestStorageHandler extends DefaultStorageHandler {
+
+ @Override
+  public Class<? extends InputFormat> getInputFormatClass() {
+ return Base64TextInputFormat.class;
+ }
+
+ @Override
+  public Class<? extends OutputFormat> getOutputFormatClass() {
+ return TestBase64TextOutputFormat.class;
+ }
+}
diff --git a/metastore/if/hive_metastore.thrift b/metastore/if/hive_metastore.thrift
index c2a2419..36f00fa 100755
--- a/metastore/if/hive_metastore.thrift
+++ b/metastore/if/hive_metastore.thrift
@@ -1204,5 +1204,6 @@ const string FILE_OUTPUT_FORMAT = "file.outputformat",
const string META_TABLE_STORAGE = "storage_handler",
const string TABLE_IS_TRANSACTIONAL = "transactional",
const string TABLE_NO_AUTO_COMPACT = "no_auto_compaction",
+const string META_TABLE_STORAGE_RESOURCES = "storage_resources";
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp
index c7bf9ba..bd6514d 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp
@@ -57,6 +57,8 @@ hive_metastoreConstants::hive_metastoreConstants() {
TABLE_NO_AUTO_COMPACT = "no_auto_compaction";
+ META_TABLE_STORAGE_RESOURCES = "storage_resources";
+
}
}}} // namespace
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h
index 35a8a50..dbcc603 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h
@@ -38,6 +38,7 @@ class hive_metastoreConstants {
std::string META_TABLE_STORAGE;
std::string TABLE_IS_TRANSACTIONAL;
std::string TABLE_NO_AUTO_COMPACT;
+ std::string META_TABLE_STORAGE_RESOURCES;
};
extern const hive_metastoreConstants g_hive_metastore_constants;
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java
index 0f2fca8..9865f01 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java
@@ -79,4 +79,6 @@
public static final String TABLE_NO_AUTO_COMPACT = "no_auto_compaction";
+ public static final String META_TABLE_STORAGE_RESOURCES = "storage_resources";
+
}
diff --git a/metastore/src/gen/thrift/gen-php/metastore/Types.php b/metastore/src/gen/thrift/gen-php/metastore/Types.php
index e8afe4d..7e6c938 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/Types.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/Types.php
@@ -13749,4 +13749,6 @@ $GLOBALS['hive_metastore_CONSTANTS']['TABLE_IS_TRANSACTIONAL'] = "transactional"
$GLOBALS['hive_metastore_CONSTANTS']['TABLE_NO_AUTO_COMPACT'] = "no_auto_compaction";
+$GLOBALS['hive_metastore_CONSTANTS']['META_TABLE_STORAGE_RESOURCES'] = "storage_resources";
+
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
old mode 100644
new mode 100755
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py b/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py
index 81f70eb..1c8785c 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py
@@ -32,3 +32,4 @@
META_TABLE_STORAGE = "storage_handler"
TABLE_IS_TRANSACTIONAL = "transactional"
TABLE_NO_AUTO_COMPACT = "no_auto_compaction"
+META_TABLE_STORAGE_RESOURCES = "storage_resources"
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb
index 3208ecd..9e3a60b 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb
@@ -53,3 +53,5 @@ TABLE_IS_TRANSACTIONAL = %q"transactional"
TABLE_NO_AUTO_COMPACT = %q"no_auto_compaction"
+META_TABLE_STORAGE_RESOURCES = %q"storage_resources"
+
diff --git a/metastore/src/model/org/apache/hadoop/hive/metastore/model/MStorageDescriptor.java b/metastore/src/model/org/apache/hadoop/hive/metastore/model/MStorageDescriptor.java
index 9da3071..fbdf5a1 100644
--- a/metastore/src/model/org/apache/hadoop/hive/metastore/model/MStorageDescriptor.java
+++ b/metastore/src/model/org/apache/hadoop/hive/metastore/model/MStorageDescriptor.java
@@ -252,7 +252,7 @@ public void setSkewedColValues(List<MStringList> skewedColValues) {
}
/**
- * @param skewedColValueLocationMaps the skewedColValueLocationMaps to set
+ * @param listBucketColValuesMapping the skewedColValueLocationMaps to set
*/
  public void setSkewedColValueLocationMaps(Map<MStringList, String> listBucketColValuesMapping) {
this.skewedColValueLocationMaps = listBucketColValuesMapping;
@@ -273,5 +273,4 @@ public boolean isStoredAsSubDirectories() {
public void setStoredAsSubDirectories(boolean storedAsSubDirectories) {
this.isStoredAsSubDirectories = storedAsSubDirectories;
}
-
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 089bd94..f732089 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -84,6 +84,7 @@
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.TxnInfo;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -3882,9 +3883,12 @@ private int createTable(Hive db, CreateTableDesc crtTbl) throws HiveException {
}
if (crtTbl.getStorageHandler() != null) {
- tbl.setProperty(
- org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
- crtTbl.getStorageHandler());
+ tbl.setProperty(hive_metastoreConstants.META_TABLE_STORAGE, crtTbl.getStorageHandler());
+ }
+
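+      // Record the handler's resource list on the table (Kryo+Base64 encoded) so that
+      // later reads and CREATE TABLE LIKE can re-register the same jars.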
+    if (crtTbl.getStorageHandlerResources() != null) {
+ tbl.setProperty(hive_metastoreConstants.META_TABLE_STORAGE_RESOURCES,
+ Utilities.serializeResourceUris(crtTbl.getStorageHandlerResources()));
}
HiveStorageHandler storageHandler = tbl.getStorageHandler();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
index 569c125..1d56df0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
@@ -26,11 +26,9 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
@@ -44,6 +42,7 @@
import org.apache.hadoop.hive.ql.plan.FunctionWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.util.SemanticAnalyzerHelper;
import org.apache.hadoop.util.StringUtils;
/**
@@ -125,7 +124,7 @@ private int createPermanentFunction(Hive db, CreateFunctionDesc createFunctionDe
checkLocalFunctionResources(db, createFunctionDesc.getResources());
// Add any required resources
- addFunctionResources(createFunctionDesc.getResources());
+ SemanticAnalyzerHelper.addResourcesToCurrentSession(createFunctionDesc.getResources());
// UDF class should exist
    Class<?> udfClass = getUdfClass(createFunctionDesc);
@@ -275,30 +274,8 @@ private void checkLocalFunctionResources(Hive db, List<ResourceUri> resources)
}
}
-
- private static SessionState.ResourceType getResourceType(ResourceType rt) throws HiveException {
- switch (rt) {
- case JAR:
- return SessionState.ResourceType.JAR;
- case FILE:
- return SessionState.ResourceType.FILE;
- case ARCHIVE:
- return SessionState.ResourceType.ARCHIVE;
- default:
- throw new HiveException("Unexpected resource type " + rt);
- }
- }
-
  public static void addFunctionResources(List<ResourceUri> resources) throws HiveException {
- if (resources != null) {
- for (ResourceUri res : resources) {
- String addedResource =
- SessionState.get().add_resource(getResourceType(res.getResourceType()), res.getUri());
- if (addedResource == null) {
- throw new HiveException("Unable to load " + res.getResourceType() + " " + res.getUri());
- }
- }
- }
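+    // Retained for existing callers; the conversion and registration logic now lives
+    // in SemanticAnalyzerHelper so storage handler resource loading can reuse it.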
+ SemanticAnalyzerHelper.addResourcesToCurrentSession(resources);
}
  private Class<?> getUdfClass(CreateFunctionDesc desc) throws ClassNotFoundException {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 7d72783..1904269 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.exec;
+import com.esotericsoftware.kryo.serializers.FieldSerializer;
import java.beans.DefaultPersistenceDelegate;
import java.beans.Encoder;
import java.beans.ExceptionListener;
@@ -103,6 +104,7 @@
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -186,7 +188,6 @@
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
-import com.esotericsoftware.kryo.serializers.FieldSerializer;
import com.esotericsoftware.shaded.org.objenesis.strategy.StdInstantiatorStrategy;
/**
@@ -812,6 +813,53 @@ public static ExprNodeGenericFuncDesc deserializeExpression(String s) {
return func;
}
+ /**
+ * Serializes resource uri via Kryo.
+ *
+ * @param uris List of ResourceUri.
+ * @return String.
+ */
+  public static String serializeResourceUris(List<ResourceUri> uris) {
+ try {
+ return new String(Base64.encodeBase64(serializeResourceUrisToKryo(uris)), "UTF-8");
+ } catch (UnsupportedEncodingException ex) {
+ throw new RuntimeException("UTF-8 support required", ex);
+ }
+ }
+
+ /**
+ * Deserializes resource uris from Kryo.
+ *
+   * @param uris Base64/Kryo-encoded string produced by serializeResourceUris.
+   * @return the decoded list of ResourceUri, or null if the input is null.
+ */
+  public static List<ResourceUri> deserializeResourceUris(String uris) {
+    if (uris == null) {
+      // Tables created without STORED BY ... USING carry no storage_resources property.
+      return null;
+    }
+    try {
+      return deserializeResourceUrisFromKryo(Base64.decodeBase64(uris.getBytes("UTF-8")));
+    } catch (UnsupportedEncodingException ex) {
+      throw new RuntimeException("UTF-8 support required", ex);
+    }
+  }
+
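+  /**
+   * Kryo-serializes the uri list; ResourceUri and the concrete list class are
+   * registered up front so the write and read sides agree on Kryo class ids.
+   */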
+  public static byte[] serializeResourceUrisToKryo(List<ResourceUri> uris) {
+ Kryo kryo = runtimeSerializationKryo.get();
+ kryo.register(ResourceUri.class);
+ kryo.register(uris.getClass());
+
+ ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+ Output output = new Output(byteArrayOutputStream);
+ kryo.writeObject(output, uris);
+ output.flush();
+
+ return byteArrayOutputStream.toByteArray();
+ }
+
+  public static List<ResourceUri> deserializeResourceUrisFromKryo(byte[] bytes) {
+    Kryo kryo = runtimeSerializationKryo.get();
+    // Mirror the registrations done in serializeResourceUrisToKryo so class ids line up.
+    kryo.register(ResourceUri.class);
+    kryo.register(ArrayList.class);
+
+    Input input = new Input(new ByteArrayInputStream(bytes));
+    return kryo.readObject(input, ArrayList.class);
+  }
+
public static String serializeObject(Serializable expr) {
try {
return new String(Base64.encodeBase64(serializeObjectToKryo(expr)), "UTF-8");
@@ -3781,4 +3829,4 @@ public static String getQualifiedPath(HiveConf conf, Path path) throws HiveExcep
public static boolean isDefaultNameNode(HiveConf conf) {
return !conf.getChangedProperties().containsKey(HiveConf.ConfVars.HADOOPFS.varname);
}
-}
+}
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index c4633f6..c245b0a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -27,6 +27,7 @@
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;
@@ -36,6 +37,7 @@
import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.util.SemanticAnalyzerHelper;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.ReflectionUtils;
@@ -301,11 +303,20 @@ public static String unparseIdentifier(String identifier, Configuration conf) {
public static HiveStorageHandler getStorageHandler(
Configuration conf, String className) throws HiveException {
+ return getStorageHandler(conf, className, null);
+ }
+
+ public static HiveStorageHandler getStorageHandler(
+      Configuration conf, String className, List<ResourceUri> storageResources) throws HiveException {
if (className == null) {
return null;
}
try {
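+      // Register any STORED BY ... USING resources with the current session first so
+      // the handler class below can be resolved from the session classloader.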
+ if (storageResources != null) {
+ SemanticAnalyzerHelper.addResourcesToCurrentSession(storageResources);
+ }
+
      Class<? extends HiveStorageHandler> handlerClass =
          (Class<? extends HiveStorageHandler>)
Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index 69a4545..547c9d4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -284,10 +284,9 @@ public HiveStorageHandler getStorageHandler() {
return storageHandler;
}
try {
- storageHandler = HiveUtils.getStorageHandler(
- Hive.get().getConf(),
- getProperty(
- org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE));
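+      // Pass along any jars recorded in the storage_resources table property so they
+      // are added to the session before the handler class is loaded.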
+ storageHandler = HiveUtils.getStorageHandler(Hive.get().getConf(),
+ getProperty(hive_metastoreConstants.META_TABLE_STORAGE), Utilities.deserializeResourceUris(
+ getProperty(hive_metastoreConstants.META_TABLE_STORAGE_RESOURCES)));
} catch (Exception e) {
throw new RuntimeException(e);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
index 1ef6d1b..6a66934 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.ql.parse;
-import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
@@ -25,7 +24,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -37,7 +35,7 @@
import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
import org.apache.hadoop.hive.ql.plan.FunctionWork;
-import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import org.apache.hadoop.hive.ql.util.SemanticAnalyzerHelper;
/**
* FunctionSemanticAnalyzer.
@@ -75,7 +73,7 @@ private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
}
// find any referenced resources
-    List<ResourceUri> resources = getResourceList(ast);
+    List<ResourceUri> resources = SemanticAnalyzerHelper.getResourceList(ast);
CreateFunctionDesc desc =
new CreateFunctionDesc(functionName, isTemporaryFunction, className, resources);
@@ -109,46 +107,6 @@ private void analyzeDropFunction(ASTNode ast) throws SemanticException {
addEntities(functionName, isTemporaryFunction, null);
}
- private ResourceType getResourceType(ASTNode token) throws SemanticException {
- switch (token.getType()) {
- case HiveParser.TOK_JAR:
- return ResourceType.JAR;
- case HiveParser.TOK_FILE:
- return ResourceType.FILE;
- case HiveParser.TOK_ARCHIVE:
- return ResourceType.ARCHIVE;
- default:
- throw new SemanticException("Unexpected token " + token.toString());
- }
- }
-
-  private List<ResourceUri> getResourceList(ASTNode ast) throws SemanticException {
-    List<ResourceUri> resources = null;
- ASTNode resourcesNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST);
-
- if (resourcesNode != null) {
-      resources = new ArrayList<ResourceUri>();
- for (int idx = 0; idx < resourcesNode.getChildCount(); ++idx) {
- // ^(TOK_RESOURCE_URI $resType $resPath)
- ASTNode resNode = (ASTNode) resourcesNode.getChild(idx);
- if (resNode.getToken().getType() != HiveParser.TOK_RESOURCE_URI) {
- throw new SemanticException("Expected token type TOK_RESOURCE_URI but found "
- + resNode.getToken().toString());
- }
- if (resNode.getChildCount() != 2) {
- throw new SemanticException("Expected 2 child nodes of TOK_RESOURCE_URI but found "
- + resNode.getChildCount());
- }
- ASTNode resTypeNode = (ASTNode) resNode.getChild(0);
- ASTNode resUriNode = (ASTNode) resNode.getChild(1);
- ResourceType resourceType = getResourceType(resTypeNode);
- resources.add(new ResourceUri(resourceType, PlanUtils.stripQuotes(resUriNode.getText())));
- }
- }
-
- return resources;
- }
-
/**
* Add write entities to the semantic analyzer to restrict function creation to privileged users.
*/
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 149b788..5a5249f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -1847,8 +1847,8 @@ tableFileFormat
KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
-> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
| KW_STORED KW_BY storageHandler=StringLiteral
- (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
- -> ^(TOK_STORAGEHANDLER $storageHandler $serdeprops?)
+ (KW_USING rList=resourceList)? (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
+ -> ^(TOK_STORAGEHANDLER $storageHandler $rList? $serdeprops?)
| KW_STORED KW_AS genericSpec=identifier
-> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index bdb9204..f8c3f61 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -10613,7 +10613,7 @@ ASTNode analyzeCreateTable(
skewedValues);
crtTblDesc.setStoredAsSubDirectories(storedAsDirs);
crtTblDesc.setNullFormat(rowFormatParams.nullFormat);
-
+ crtTblDesc.setStorageHandlerResources(storageFormat.getStorageHandlerResources());
crtTblDesc.validate(conf);
// outputs is empty, which means this create table happens in the current
// database.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java
index 7723430..a83763f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java
@@ -18,15 +18,20 @@
package org.apache.hadoop.hive.ql.parse;
import static org.apache.hadoop.hive.ql.parse.ParseUtils.ensureClassExists;
+
+import java.util.ArrayList;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.ql.io.StorageFormatDescriptor;
import org.apache.hadoop.hive.ql.io.StorageFormatFactory;
+import org.apache.hadoop.hive.ql.util.SemanticAnalyzerHelper;
public class StorageFormat {
private static final StorageFormatFactory storageFormatFactory = new StorageFormatFactory();
@@ -36,10 +41,12 @@
private String storageHandler;
private String serde;
  private final Map<String, String> serdeProps;
+  private final List<ResourceUri> storageHandlerResources;
public StorageFormat(Configuration conf) {
this.conf = conf;
-    this.serdeProps = new HashMap<String, String>();
+ this.serdeProps = new HashMap<>();
+ this.storageHandlerResources = new ArrayList<>();
}
/**
@@ -61,12 +68,26 @@ public boolean fillStorageFormat(ASTNode child) throws SemanticException {
}
break;
case HiveParser.TOK_STORAGEHANDLER:
- storageHandler = ensureClassExists(BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
- if (child.getChildCount() == 2) {
- BaseSemanticAnalyzer.readProps(
- (ASTNode) (child.getChild(1).getChild(0)),
- serdeProps);
+ storageHandler = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
+
+      // Child 0 is the storage handler class name; any remaining children are an
+      // optional resource list and/or SERDEPROPERTIES.
+ for (int i = 1; i < child.getChildCount(); i++) {
+ if (child.getChild(i).getType() == HiveParser.TOK_RESOURCE_LIST) {
+ storageHandlerResources.addAll(SemanticAnalyzerHelper.getResourceList(child));
+ } else if (child.getChild(i).getType() == HiveParser.TOK_TABLEPROPERTIES) {
+ BaseSemanticAnalyzer.readProps(
+ (ASTNode) (child.getChild(i).getChild(0)),
+ serdeProps);
+ } else {
+ throw new SemanticException("Invalid sub-statement for storage format");
+ }
+ }
+
+ if (storageHandlerResources.isEmpty()) {
+        // No external jars were supplied, so the handler class must already be on the classpath.
+ ensureClassExists(storageHandler);
}
+
break;
case HiveParser.TOK_FILEFORMAT_GENERIC:
ASTNode grandChild = (ASTNode)child.getChild(0);
@@ -142,4 +163,8 @@ public String getSerde() {
  public Map<String, String> getSerdeProps() {
return serdeProps;
}
+
+  public List<ResourceUri> getStorageHandlerResources() {
+ return storageHandlerResources;
+ }
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
index 8cadb96..14dc440 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
@@ -29,6 +29,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
@@ -69,6 +70,7 @@
String location;
String serName;
String storageHandler;
+  List<ResourceUri> storageHandlerResources;
Map serdeProps;
Map tblProps;
boolean ifNotExists;
@@ -81,21 +83,58 @@ public CreateTableDesc() {
}
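+  // Backwards-compatible constructors: delegate with null storage handler resources.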
public CreateTableDesc(String databaseName, String tableName, boolean isExternal, boolean isTemporary,
+      List<FieldSchema> cols, List<FieldSchema> partCols,
+      List<String> bucketCols, List<Order> sortCols, int numBuckets,
+      String fieldDelim, String fieldEscape, String collItemDelim,
+      String mapKeyDelim, String lineDelim, String comment, String inputFormat,
+      String outputFormat, String location, String serName,
+      String storageHandler,
+      Map<String, String> serdeProps,
+      Map<String, String> tblProps,
+      boolean ifNotExists, List<String> skewedColNames, List<List<String>> skewedColValues) {
+
+ this(tableName, isExternal, isTemporary, cols, partCols,
+ bucketCols, sortCols, numBuckets, fieldDelim, fieldEscape,
+ collItemDelim, mapKeyDelim, lineDelim, comment, inputFormat,
+ outputFormat, location, serName, storageHandler, null,
+ serdeProps, tblProps, ifNotExists, skewedColNames, skewedColValues);
+
+ this.databaseName = databaseName;
+ }
+
+ public CreateTableDesc(String tableName, boolean isExternal, boolean isTemporary,
+      List<FieldSchema> cols, List<FieldSchema> partCols,
+      List<String> bucketCols, List<Order> sortCols, int numBuckets,
+      String fieldDelim, String fieldEscape, String collItemDelim,
+      String mapKeyDelim, String lineDelim, String comment, String inputFormat,
+      String outputFormat, String location, String serName,
+      String storageHandler,
+      Map<String, String> serdeProps,
+      Map<String, String> tblProps,
+      boolean ifNotExists, List<String> skewedColNames, List<List<String>> skewedColValues) {
+ this(tableName, isExternal, isTemporary, cols, partCols,
+ bucketCols, sortCols, numBuckets, fieldDelim, fieldEscape,
+ collItemDelim, mapKeyDelim, lineDelim, comment, inputFormat,
+ outputFormat, location, serName, storageHandler, null,
+ serdeProps, tblProps, ifNotExists, skewedColNames, skewedColValues);
+ }
+
+ public CreateTableDesc(String databaseName, String tableName, boolean isExternal, boolean isTemporary,
      List<FieldSchema> cols, List<FieldSchema> partCols,
      List<String> bucketCols, List<Order> sortCols, int numBuckets,
      String fieldDelim, String fieldEscape, String collItemDelim,
      String mapKeyDelim, String lineDelim, String comment, String inputFormat,
      String outputFormat, String location, String serName,
-      String storageHandler,
+      String storageHandler, List<ResourceUri> storageHandlerResources,
      Map<String, String> serdeProps,
      Map<String, String> tblProps,
      boolean ifNotExists, List<String> skewedColNames, List<List<String>> skewedColValues) {
this(tableName, isExternal, isTemporary, cols, partCols,
- bucketCols, sortCols, numBuckets, fieldDelim, fieldEscape,
- collItemDelim, mapKeyDelim, lineDelim, comment, inputFormat,
- outputFormat, location, serName, storageHandler, serdeProps,
- tblProps, ifNotExists, skewedColNames, skewedColValues);
+ bucketCols, sortCols, numBuckets, fieldDelim, fieldEscape,
+ collItemDelim, mapKeyDelim, lineDelim, comment, inputFormat,
+ outputFormat, location, serName, storageHandler, storageHandlerResources,
+ serdeProps, tblProps, ifNotExists, skewedColNames, skewedColValues);
this.databaseName = databaseName;
}
@@ -106,7 +145,7 @@ public CreateTableDesc(String tableName, boolean isExternal, boolean isTemporary
String fieldDelim, String fieldEscape, String collItemDelim,
String mapKeyDelim, String lineDelim, String comment, String inputFormat,
String outputFormat, String location, String serName,
- String storageHandler,
+      String storageHandler, List<ResourceUri> storageHandlerResources,
      Map<String, String> serdeProps,
      Map<String, String> tblProps,
      boolean ifNotExists, List<String> skewedColNames, List<List<String>> skewedColValues) {
@@ -129,6 +168,7 @@ public CreateTableDesc(String tableName, boolean isExternal, boolean isTemporary
    this.partCols = new ArrayList<FieldSchema>(partCols);
this.serName = serName;
this.storageHandler = storageHandler;
+ this.storageHandlerResources = storageHandlerResources;
this.serdeProps = serdeProps;
this.tblProps = tblProps;
this.ifNotExists = ifNotExists;
@@ -549,4 +589,11 @@ public void setTemporary(boolean isTemporary) {
this.isTemporary = isTemporary;
}
+  public List<ResourceUri> getStorageHandlerResources() {
+ return storageHandlerResources;
+ }
+
+  public void setStorageHandlerResources(List<ResourceUri> storageHandlerResources) {
+ this.storageHandlerResources = storageHandlerResources;
+ }
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/SemanticAnalyzerHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/util/SemanticAnalyzerHelper.java
new file mode 100644
index 0000000..ca3e5fb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/util/SemanticAnalyzerHelper.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
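+/**
+ * Helpers shared by FunctionSemanticAnalyzer and the storage handler loading path:
+ * translates TOK_RESOURCE_LIST AST nodes into ResourceUri lists and registers those
+ * JAR/FILE/ARCHIVE resources with the current SessionState.
+ */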
+public class SemanticAnalyzerHelper {
+
+ private static ResourceType getResourceType(ASTNode token) throws SemanticException {
+ switch (token.getType()) {
+ case HiveParser.TOK_JAR:
+ return ResourceType.JAR;
+ case HiveParser.TOK_FILE:
+ return ResourceType.FILE;
+ case HiveParser.TOK_ARCHIVE:
+ return ResourceType.ARCHIVE;
+ default:
+ throw new SemanticException("Unexpected token " + token.toString());
+ }
+ }
+
+  public static List<ResourceUri> getResourceList(ASTNode ast) throws SemanticException {
+    List<ResourceUri> resources = null;
+ ASTNode resourcesNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST);
+
+ if (resourcesNode != null) {
+ resources = new ArrayList<>();
+ for (int idx = 0; idx < resourcesNode.getChildCount(); ++idx) {
+ // ^(TOK_RESOURCE_URI $resType $resPath)
+ ASTNode resNode = (ASTNode) resourcesNode.getChild(idx);
+ if (resNode.getToken().getType() != HiveParser.TOK_RESOURCE_URI) {
+ throw new SemanticException("Expected token type TOK_RESOURCE_URI but found "
+ + resNode.getToken().toString());
+ }
+ if (resNode.getChildCount() != 2) {
+ throw new SemanticException("Expected 2 child nodes of TOK_RESOURCE_URI but found "
+ + resNode.getChildCount());
+ }
+ ASTNode resTypeNode = (ASTNode) resNode.getChild(0);
+ ASTNode resUriNode = (ASTNode) resNode.getChild(1);
+ ResourceType resourceType = getResourceType(resTypeNode);
+ resources.add(new ResourceUri(resourceType, PlanUtils.stripQuotes(resUriNode.getText())));
+ }
+ }
+
+ return resources;
+ }
+
+ private static SessionState.ResourceType convertToSessionStateResourceType(ResourceType rt)
+ throws HiveException {
+ switch (rt) {
+ case JAR:
+ return SessionState.ResourceType.JAR;
+ case FILE:
+ return SessionState.ResourceType.FILE;
+ case ARCHIVE:
+ return SessionState.ResourceType.ARCHIVE;
+ default:
+ throw new HiveException("Unexpected resource type " + rt);
+ }
+ }
+
+  public static void addResourcesToCurrentSession(List<ResourceUri> resources) throws HiveException {
+ if (resources != null) {
+ for (ResourceUri res : resources) {
+ String addedResource =
+ SessionState.get()
+ .add_resource(convertToSessionStateResourceType(res.getResourceType()), res.getUri());
+ if (addedResource == null) {
+ throw new HiveException("Unable to load " + res.getResourceType() + " " + res.getUri());
+ }
+ }
+ }
+ }
+
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index 69f8889..f2cd608 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -26,17 +26,17 @@
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
-import java.util.Set;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
-import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -46,6 +46,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.JobConf;
+import org.junit.Assert;
public class TestUtilities extends TestCase {
public static final Log LOG = LogFactory.getLog(TestUtilities.class);
@@ -140,4 +141,22 @@ public void testGetJarFilesByPath() {
FileUtils.deleteQuietly(f);
}
}
+
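+  // Round-trips a ResourceUri list through the Kryo/Base64 helpers in Utilities.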
+  public void testSerDeResourceUris() {
+ ResourceUri u1 = new ResourceUri();
+ u1.setUri("u1");
+ u1.setResourceType(ResourceType.ARCHIVE);
+
+ ResourceUri u2 = new ResourceUri();
+ u2.setUri("u2");
+ u2.setResourceType(ResourceType.FILE);
+
+    List<ResourceUri> list = new ArrayList<>();
+    list.add(u1);
+    list.add(u2);
+
+ String byteStr = Utilities.serializeResourceUris(list);
+    List<ResourceUri> res = Utilities.deserializeResourceUris(byteStr);
+
+ Assert.assertArrayEquals("The context should be the same", list.toArray(), res.toArray());
+ }
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/KryoHiveTest.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/KryoHiveTest.java
new file mode 100644
index 0000000..b491ddb
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/KryoHiveTest.java
@@ -0,0 +1,59 @@
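+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+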
+package org.apache.hadoop.hive.ql.exec.persistence;
+import java.beans.XMLDecoder;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+
+import org.apache.hadoop.hive.ql.plan.MapredWork;
+
+import com.esotericsoftware.kryo.Kryo;
+import com.esotericsoftware.kryo.io.Input;
+import com.esotericsoftware.kryo.io.Output;
+
+/**
+ * Standalone check that a MapredWork plan decoded from a javaXML plan file can be
+ * round-tripped through Kryo serialization.
+ */
+public class KryoHiveTest {
+  static String planXML = "/localdisk/chengxu/code/another_hive/map.xml";
+
+  public static void main(String[] args) throws Exception {
+    if (args.length > 0) {
+      planXML = args[0];
+    }
+ KryoHiveTest gt = new KryoHiveTest();
+ gt.fun();
+ }
+
+ private void fun() throws FileNotFoundException {
+ MapredWork work;
+ XMLDecoder d = null;
+ try {
+ System.out.println("planXML file :"+ planXML);
+ d = new XMLDecoder(new FileInputStream(planXML));
+ work = (MapredWork) d.readObject();
+ } finally {
+ if (null != d) {
+ d.close();
+ }
+ }
+
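+    // Round-trip the plan through Kryo; a failure here points at a plan class that
+    // Kryo cannot serialize or deserialize.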
+ // System.out.println(work.getMapWork().getPathToAliases());
+ // System.out.println(work.getMapWork().getPathToPartitionInfo());
+ Kryo kryo = new Kryo();
+ ByteArrayOutputStream os = new ByteArrayOutputStream();
+ Output output = new Output(os);
+ kryo.writeObject(output, work);
+ output.flush();
+ output.close();
+
+ Input inp = new Input(new ByteArrayInputStream(os.toByteArray()));
+    work = kryo.readObject(inp, MapredWork.class);
+ inp.close();
+ //System.out.println(work.getMapWork().getPathToAliases());
+ //System.out.println(work.getMapWork().getPathToPartitionInfo());
+ }
+}
diff --git a/ql/src/test/queries/clientpositive/storage_handler_link_external_jar.q b/ql/src/test/queries/clientpositive/storage_handler_link_external_jar.q
new file mode 100644
index 0000000..095d370
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/storage_handler_link_external_jar.q
@@ -0,0 +1,19 @@
+set hive.plan.serialization.format=javaXML;
+
+create table table_with_external_storage_handler(id String, name string)
+stored by "org.apache.hadoop.hive.storagehandler.TestStorageHandler"
+USING JAR '${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar'
+with serdeproperties("mongo.column.mapping" = "id,name" )
+tblproperties ( "mongo.host" = "192.168.0.5");
+
+describe formatted table_with_external_storage_handler;
+
+create table table_like_external_storage_handler like table_with_external_storage_handler;
+
+describe formatted table_like_external_storage_handler;
+
+insert overwrite table table_with_external_storage_handler select * from src;
+insert overwrite table table_like_external_storage_handler select * from src;
+
+select count(*) from table_with_external_storage_handler;
+select count(*) from table_like_external_storage_handler;
\ No newline at end of file
diff --git a/ql/src/test/results/clientpositive/storage_handler_link_external_jar.q.out b/ql/src/test/results/clientpositive/storage_handler_link_external_jar.q.out
new file mode 100644
index 0000000..b9022b4
--- /dev/null
+++ b/ql/src/test/results/clientpositive/storage_handler_link_external_jar.q.out
@@ -0,0 +1,127 @@
+PREHOOK: query: create table table_with_external_storage_handler(id String, name string)
+stored by "org.apache.hadoop.hive.storagehandler.TestStorageHandler"
+USING JAR '/root/.m2/repository/org/apache/hive/hive-it-test-serde/1.2.0-SNAPSHOT/hive-it-test-serde-1.2.0-SNAPSHOT.jar'
+with serdeproperties("mongo.column.mapping" = "id,name" )
+tblproperties ( "mongo.host" = "192.168.0.5")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_with_external_storage_handler
+POSTHOOK: query: create table table_with_external_storage_handler(id String, name string)
+stored by "org.apache.hadoop.hive.storagehandler.TestStorageHandler"
+USING JAR '/root/.m2/repository/org/apache/hive/hive-it-test-serde/1.2.0-SNAPSHOT/hive-it-test-serde-1.2.0-SNAPSHOT.jar'
+with serdeproperties("mongo.column.mapping" = "id,name" )
+tblproperties ( "mongo.host" = "192.168.0.5")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_with_external_storage_handler
+PREHOOK: query: describe formatted table_with_external_storage_handler
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table_with_external_storage_handler
+POSTHOOK: query: describe formatted table_with_external_storage_handler
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table_with_external_storage_handler
+# col_name data_type comment
+
+id string
+name string
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ mongo.host 192.168.0.5
+ storage_handler org.apache.hadoop.hive.storagehandler.TestStorageHandler
+ storage_resources AQERAQEBAe0BL3Jvb3QvLm0yL3JlcG9zaXRvcnkvb3JnL2FwYWNoZS9oaXZlL2hpdmUtaXQtdGVzdC1zZXJkZS8xLjIuMC1TTkFQU0hPVC9oaXZlLWl0LXRlc3Qtc2VyZGUtMS4yLjAtU05BUFNIT1QuamFy
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: null
+OutputFormat: null
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ mongo.column.mapping id,name
+ serialization.format 1
+PREHOOK: query: create table table_like_external_storage_handler like table_with_external_storage_handler
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_like_external_storage_handler
+POSTHOOK: query: create table table_like_external_storage_handler like table_with_external_storage_handler
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_like_external_storage_handler
+PREHOOK: query: describe formatted table_like_external_storage_handler
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table_like_external_storage_handler
+POSTHOOK: query: describe formatted table_like_external_storage_handler
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table_like_external_storage_handler
+# col_name data_type comment
+
+id string
+name string
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ storage_handler org.apache.hadoop.hive.storagehandler.TestStorageHandler
+ storage_resources AQERAQEBAe0BL3Jvb3QvLm0yL3JlcG9zaXRvcnkvb3JnL2FwYWNoZS9oaXZlL2hpdmUtaXQtdGVzdC1zZXJkZS8xLjIuMC1TTkFQU0hPVC9oaXZlLWl0LXRlc3Qtc2VyZGUtMS4yLjAtU05BUFNIT1QuamFy
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: null
+OutputFormat: null
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ mongo.column.mapping id,name
+ serialization.format 1
+PREHOOK: query: insert overwrite table table_with_external_storage_handler select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table_with_external_storage_handler
+POSTHOOK: query: insert overwrite table table_with_external_storage_handler select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table_with_external_storage_handler
+PREHOOK: query: insert overwrite table table_like_external_storage_handler select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table_like_external_storage_handler
+POSTHOOK: query: insert overwrite table table_like_external_storage_handler select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table_like_external_storage_handler
+PREHOOK: query: select count(*) from table_with_external_storage_handler
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_with_external_storage_handler
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from table_with_external_storage_handler
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_with_external_storage_handler
+#### A masked pattern was here ####
+500
+PREHOOK: query: select count(*) from table_like_external_storage_handler
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_like_external_storage_handler
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from table_like_external_storage_handler
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_like_external_storage_handler
+#### A masked pattern was here ####
+500
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
index dda3c5f..1b708dd 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
@@ -528,7 +528,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, ThriftTestObj struc
struct.field3 = new ArrayList<InnerStruct>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
- InnerStruct _elem2; // optional
+ InnerStruct _elem2; // required
_elem2 = new InnerStruct();
_elem2.read(iprot);
struct.field3.add(_elem2);
@@ -636,7 +636,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, ThriftTestObj struct
struct.field3 = new ArrayList<InnerStruct>(_list5.size);
for (int _i6 = 0; _i6 < _list5.size; ++_i6)
{
- InnerStruct _elem7; // optional
+ InnerStruct _elem7; // required
_elem7 = new InnerStruct();
_elem7.read(iprot);
struct.field3.add(_elem7);
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
index ff0c1f2..07ea8b9 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
@@ -1211,7 +1211,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, Complex struct) thr
struct.lint = new ArrayList<Integer>(_list18.size);
for (int _i19 = 0; _i19 < _list18.size; ++_i19)
{
- int _elem20; // optional
+ int _elem20; // required
_elem20 = iprot.readI32();
struct.lint.add(_elem20);
}
@@ -1229,7 +1229,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, Complex struct) thr
struct.lString = new ArrayList<String>(_list21.size);
for (int _i22 = 0; _i22 < _list21.size; ++_i22)
{
- String _elem23; // optional
+ String _elem23; // required
_elem23 = iprot.readString();
struct.lString.add(_elem23);
}
@@ -1247,7 +1247,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, Complex struct) thr
struct.lintString = new ArrayList<IntString>(_list24.size);
for (int _i25 = 0; _i25 < _list24.size; ++_i25)
{
- IntString _elem26; // optional
+ IntString _elem26; // required
_elem26 = new IntString();
_elem26.read(iprot);
struct.lintString.add(_elem26);
@@ -1610,7 +1610,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, Complex struct) thro
struct.lint = new ArrayList<Integer>(_list57.size);
for (int _i58 = 0; _i58 < _list57.size; ++_i58)
{
- int _elem59; // optional
+ int _elem59; // required
_elem59 = iprot.readI32();
struct.lint.add(_elem59);
}
@@ -1623,7 +1623,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, Complex struct) thro
struct.lString = new ArrayList<String>(_list60.size);
for (int _i61 = 0; _i61 < _list60.size; ++_i61)
{
- String _elem62; // optional
+ String _elem62; // required
_elem62 = iprot.readString();
struct.lString.add(_elem62);
}
@@ -1636,7 +1636,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, Complex struct) thro
struct.lintString = new ArrayList<IntString>(_list63.size);
for (int _i64 = 0; _i64 < _list63.size; ++_i64)
{
- IntString _elem65; // optional
+ IntString _elem65; // required
_elem65 = new IntString();
_elem65.read(iprot);
struct.lintString.add(_elem65);
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
index fba49e4..386fef9 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
@@ -2280,7 +2280,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, MegaStruct struct)
_val19 = new ArrayList<String>(_list20.size);
for (int _i21 = 0; _i21 < _list20.size; ++_i21)
{
- String _elem22; // optional
+ String _elem22; // required
_elem22 = iprot.readString();
_val19.add(_elem22);
}
@@ -2310,7 +2310,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, MegaStruct struct)
_val26 = new ArrayList<MiniStruct>(_list27.size);
for (int _i28 = 0; _i28 < _list27.size; ++_i28)
{
- MiniStruct _elem29; // optional
+ MiniStruct _elem29; // required
_elem29 = new MiniStruct();
_elem29.read(iprot);
_val26.add(_elem29);
@@ -2333,7 +2333,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, MegaStruct struct)
struct.my_stringlist = new ArrayList<String>(_list30.size);
for (int _i31 = 0; _i31 < _list30.size; ++_i31)
{
- String _elem32; // optional
+ String _elem32; // required
_elem32 = iprot.readString();
struct.my_stringlist.add(_elem32);
}
@@ -2351,7 +2351,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, MegaStruct struct)
struct.my_structlist = new ArrayList<MiniStruct>(_list33.size);
for (int _i34 = 0; _i34 < _list33.size; ++_i34)
{
- MiniStruct _elem35; // optional
+ MiniStruct _elem35; // required
_elem35 = new MiniStruct();
_elem35.read(iprot);
struct.my_structlist.add(_elem35);
@@ -2370,7 +2370,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, MegaStruct struct)
struct.my_enumlist = new ArrayList<MyEnum>(_list36.size);
for (int _i37 = 0; _i37 < _list36.size; ++_i37)
{
- MyEnum _elem38; // optional
+ MyEnum _elem38; // required
_elem38 = MyEnum.findByValue(iprot.readI32());
struct.my_enumlist.add(_elem38);
}
@@ -2388,7 +2388,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, MegaStruct struct)
struct.my_stringset = new HashSet<String>(2*_set39.size);
for (int _i40 = 0; _i40 < _set39.size; ++_i40)
{
- String _elem41; // optional
+ String _elem41; // required
_elem41 = iprot.readString();
struct.my_stringset.add(_elem41);
}
@@ -2406,7 +2406,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, MegaStruct struct)
struct.my_enumset = new HashSet<MyEnum>(2*_set42.size);
for (int _i43 = 0; _i43 < _set42.size; ++_i43)
{
- MyEnum _elem44; // optional
+ MyEnum _elem44; // required
_elem44 = MyEnum.findByValue(iprot.readI32());
struct.my_enumset.add(_elem44);
}
@@ -2424,7 +2424,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, MegaStruct struct)
struct.my_structset = new HashSet<MiniStruct>(2*_set45.size);
for (int _i46 = 0; _i46 < _set45.size; ++_i46)
{
- MiniStruct _elem47; // optional
+ MiniStruct _elem47; // required
_elem47 = new MiniStruct();
_elem47.read(iprot);
struct.my_structset.add(_elem47);
@@ -3023,7 +3023,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, MegaStruct struct) t
_val95 = new ArrayList<String>(_list96.size);
for (int _i97 = 0; _i97 < _list96.size; ++_i97)
{
- String _elem98; // optional
+ String _elem98; // required
_elem98 = iprot.readString();
_val95.add(_elem98);
}
@@ -3047,7 +3047,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, MegaStruct struct) t
_val102 = new ArrayList<MiniStruct>(_list103.size);
for (int _i104 = 0; _i104 < _list103.size; ++_i104)
{
- MiniStruct _elem105; // optional
+ MiniStruct _elem105; // required
_elem105 = new MiniStruct();
_elem105.read(iprot);
_val102.add(_elem105);
@@ -3064,7 +3064,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, MegaStruct struct) t
struct.my_stringlist = new ArrayList<String>(_list106.size);
for (int _i107 = 0; _i107 < _list106.size; ++_i107)
{
- String _elem108; // optional
+ String _elem108; // required
_elem108 = iprot.readString();
struct.my_stringlist.add(_elem108);
}
@@ -3077,7 +3077,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, MegaStruct struct) t
struct.my_structlist = new ArrayList<MiniStruct>(_list109.size);
for (int _i110 = 0; _i110 < _list109.size; ++_i110)
{
- MiniStruct _elem111; // optional
+ MiniStruct _elem111; // required
_elem111 = new MiniStruct();
_elem111.read(iprot);
struct.my_structlist.add(_elem111);
@@ -3091,7 +3091,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, MegaStruct struct) t
struct.my_enumlist = new ArrayList<MyEnum>(_list112.size);
for (int _i113 = 0; _i113 < _list112.size; ++_i113)
{
- MyEnum _elem114; // optional
+ MyEnum _elem114; // required
_elem114 = MyEnum.findByValue(iprot.readI32());
struct.my_enumlist.add(_elem114);
}
@@ -3104,7 +3104,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, MegaStruct struct) t
struct.my_stringset = new HashSet<String>(2*_set115.size);
for (int _i116 = 0; _i116 < _set115.size; ++_i116)
{
- String _elem117; // optional
+ String _elem117; // required
_elem117 = iprot.readString();
struct.my_stringset.add(_elem117);
}
@@ -3117,7 +3117,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, MegaStruct struct) t
struct.my_enumset = new HashSet<MyEnum>(2*_set118.size);
for (int _i119 = 0; _i119 < _set118.size; ++_i119)
{
- MyEnum _elem120; // optional
+ MyEnum _elem120; // required
_elem120 = MyEnum.findByValue(iprot.readI32());
struct.my_enumset.add(_elem120);
}
@@ -3130,7 +3130,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, MegaStruct struct) t
struct.my_structset = new HashSet<MiniStruct>(2*_set121.size);
for (int _i122 = 0; _i122 < _set121.size; ++_i122)
{
- MiniStruct _elem123; // optional
+ MiniStruct _elem123; // required
_elem123 = new MiniStruct();
_elem123.read(iprot);
struct.my_structset.add(_elem123);
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
index a50a508..aa56dc9 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
@@ -300,7 +300,7 @@ protected Object standardSchemeReadValue(org.apache.thrift.protocol.TProtocol ip
lString = new ArrayList<String>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
- String _elem2; // optional
+ String _elem2; // required
_elem2 = iprot.readString();
lString.add(_elem2);
}
@@ -423,7 +423,7 @@ protected Object tupleSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot
lString = new ArrayList<String>(_list9.size);
for (int _i10 = 0; _i10 < _list9.size; ++_i10)
{
- String _elem11; // optional
+ String _elem11; // required
_elem11 = iprot.readString();
lString.add(_elem11);
}
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
index 334d225..676f2b2 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
@@ -431,7 +431,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, SetIntString struct
struct.sIntString = new HashSet<IntString>(2*_set82.size);
for (int _i83 = 0; _i83 < _set82.size; ++_i83)
{
- IntString _elem84; // optional
+ IntString _elem84; // required
_elem84 = new IntString();
_elem84.read(iprot);
struct.sIntString.add(_elem84);
@@ -530,7 +530,7 @@ public void read(org.apache.thrift.protocol.TProtocol prot, SetIntString struct)
struct.sIntString = new HashSet<IntString>(2*_set87.size);
for (int _i88 = 0; _i88 < _set87.size; ++_i88)
{
- IntString _elem89; // optional
+ IntString _elem89; // required
_elem89 = new IntString();
_elem89.read(iprot);
struct.sIntString.add(_elem89);
diff --git a/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote b/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote
old mode 100644
new mode 100755
diff --git a/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote b/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote
old mode 100644
new mode 100755