diff --git build.properties build.properties
index fd61db1..a0214a7 100644
--- build.properties
+++ build.properties
@@ -72,8 +72,8 @@
 jsp.test.jar=${hadoop.root}/lib/jetty-ext/jsp-api.jar
 common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
 # module names needed for build process
-iterate.hive.all=ant,shims,common,serde,metastore,hcatalog,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
-iterate.hive.modules=shims,common,serde,metastore,hcatalog,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
+iterate.hive.all=ant,shims,common,serde,metastore,hcatalog,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils
+iterate.hive.modules=shims,common,serde,metastore,hcatalog,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils
 iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,metastore,hcatalog,odbc,serde,service
 iterate.hive.thrift=ql,service,metastore,hcatalog,serde
 iterate.hive.protobuf=ql
diff --git build.xml build.xml
index 80913eb..23d1709 100644
--- build.xml
+++ build.xml
[The XML bodies of the following hunks were lost in extraction; only the hunk headers and a few attribute fragments survive.]
@@ -385,7 +385,6 @@
@@ -402,9 +401,6 @@
@@ -492,9 +488,6 @@
@@ -517,14 +510,6 @@
@@ -683,8 +668,6 @@
@@ -954,7 +937,6 @@
@@ -964,14 +946,12 @@
@@ -980,7 +960,6 @@
@@ -1065,8 +1044,6 @@
  todir="${mvn.jar.dir}" />
@@ -1237,9 +1205,6 @@
@@ -1383,16 +1348,6 @@
  output.file="${mvn.pom.dir}/hive-hcatalog-${version}.pom.asc"
  gpg.passphrase="${gpg.passphrase}"/>
diff --git builtins/build.xml builtins/build.xml
deleted file mode 100644
index 639828a..0000000
--- builtins/build.xml
+++ /dev/null
@@ -1,61 +0,0 @@
[61 deleted XML lines; content not recoverable from the extracted text.]
diff --git builtins/ivy.xml builtins/ivy.xml
deleted file mode 100644
index 98df447..0000000
--- builtins/ivy.xml
+++ /dev/null
@@ -1,42 +0,0 @@
[42 deleted XML lines; only the module description text survives:]
-    The Apache Hive (TM) data warehouse software facilitates querying and managing large datasets residing in distributed storage.
-    https://cwiki.apache.org/confluence/display/Hive/Home
diff --git builtins/src/org/apache/hive/builtins/BuiltinUtils.java builtins/src/org/apache/hive/builtins/BuiltinUtils.java
deleted file mode 100644
index 3149a4b..0000000
--- builtins/src/org/apache/hive/builtins/BuiltinUtils.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.builtins;
-
-/**
- * Utility method class supporting builtin functions. We also
- * use this class as a well-known name for loading metadata from
- * the builtins jar.
- */
-public abstract class BuiltinUtils
-{
-}
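
Note: BuiltinUtils carried no behavior of its own; it existed as a stable, well-known class name so that SessionState (see the ql hunk at the end of this patch) could locate the builtins jar at runtime. A minimal sketch of that technique, with the class name purely illustrative:

    import java.net.URL;

    public class JarLocator {
      public static void main(String[] args) throws Exception {
        // Any class can serve as a "well-known name" for its containing jar:
        // the CodeSource of its ProtectionDomain points at the jar's location.
        Class<?> wellKnown = Class.forName("org.apache.hive.builtins.BuiltinUtils");
        URL jar = wellKnown.getProtectionDomain().getCodeSource().getLocation();
        System.out.println(jar); // getCodeSource() may be null for bootstrap classes
      }
    }
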
diff --git builtins/src/org/apache/hive/builtins/UDAFUnionMap.java builtins/src/org/apache/hive/builtins/UDAFUnionMap.java
deleted file mode 100644
index 084b6c9..0000000
--- builtins/src/org/apache/hive/builtins/UDAFUnionMap.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.builtins;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hive.pdk.HivePdkUnitTest;
-import org.apache.hive.pdk.HivePdkUnitTests;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.udf.generic.AbstractGenericUDAFResolver;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
-import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-
-/**
- * Aggregate all maps into a single map. If there are multiple values for
- * the same key, result can contain any of those values.
- * Because the mappers must keep all of the data in memory, if your data is
- * non-trivially large you should set hive.map.aggr=false to ensure that
- * UNION_MAP is only executed in the reduce phase.
- */
-@HivePdkUnitTests(
-    setup = "",
-    cleanup = "",
-    cases = {
-      @HivePdkUnitTest(
-        query = "SELECT size(UNION_MAP(MAP(sepal_width, sepal_length))) "
-          +"FROM iris",
-        result = "23")
-    })
-@Description(
-    name = "union_map",
-    value = "_FUNC_(col) - aggregate given maps into a single map",
-    extended = "Aggregate maps, returns as a HashMap.")
-public class UDAFUnionMap extends AbstractGenericUDAFResolver {
-  @Override
-  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
-
-    // Next two validation calls are dependent on HIVE-2524, so
-    // leave them commented out for now.
-    //
-    // new LengthEquals(1).check(parameters.length);
-    // new IsMap().check(parameters[0], 0);
-
-    return new Evaluator();
-  }
-
-  public static class State implements AggregationBuffer {
-    HashMap map = new HashMap();
-  }
-
-  public static class Evaluator extends GenericUDAFEvaluator {
-    ObjectInspector inputOI;
-    MapObjectInspector internalMergeOI;
-
-    @Override
-    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
-      super.init(m, parameters);
-      if (m == Mode.COMPLETE || m == Mode.PARTIAL1) {
-        inputOI = (MapObjectInspector) parameters[0];
-      } else {
-        internalMergeOI = (MapObjectInspector) parameters[0];
-      }
-      return ObjectInspectorUtils.getStandardObjectInspector(parameters[0]);
-    }
-
-    @Override
-    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-      return new State();
-    }
-
-    @Override
-    public void iterate(AggregationBuffer agg, Object[] input) throws HiveException {
-      if (input[0] != null) {
-        State state = (State) agg;
-        state.map.putAll((Map) ObjectInspectorUtils.copyToStandardObject(input[0], inputOI));
-      }
-    }
-
-    @Override
-    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
-      if (partial != null) {
-        State state = (State) agg;
-        Map pset = (Map) ObjectInspectorUtils.copyToStandardObject(partial, internalMergeOI);
-        state.map.putAll(pset);
-      }
-    }
-
-    @Override
-    public void reset(AggregationBuffer agg) throws HiveException {
-      ((State) agg).map.clear();
-    }
-
-    @Override
-    public Object terminate(AggregationBuffer agg) throws HiveException {
-      return ((State) agg).map;
-    }
-
-    @Override
-    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
-      return ((State) agg).map;
-    }
-  }
-}
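
Note: to restate UNION_MAP's contract from the class comment with a tiny hypothetical session (table and data invented for illustration):

    -- rows of column m: map(1,'a'), map(2,'b'), map(1,'c')
    SELECT union_map(m) FROM t;
    -- => one map such as {1:'a',2:'b'} or {1:'c',2:'b'};
    -- which value survives a duplicated key is unspecified, and
    -- hive.map.aggr=false keeps the aggregation in the reduce phase.
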
diff --git builtins/test/cleanup.sql builtins/test/cleanup.sql
deleted file mode 100644
index 23d4ea0..0000000
--- builtins/test/cleanup.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-drop table if exists onerow;
-drop table if exists iris;
diff --git builtins/test/iris.txt builtins/test/iris.txt
deleted file mode 100644
index dc18449..0000000
--- builtins/test/iris.txt
+++ /dev/null
@@ -1,150 +0,0 @@
-6 2.9 4.5 1.5 versicolor
-6.9 3.1 5.1 2.3 virginica
-5.4 3.4 1.5 0.4 setosa
-5.5 3.5 1.3 0.2 setosa
-4.6 3.1 1.5 0.2 setosa
-4.4 2.9 1.4 0.2 setosa
-5.9 3 4.2 1.5 versicolor
-5.1 3.8 1.6 0.2 setosa
-5 3.6 1.4 0.2 setosa
-6.7 3.1 5.6 2.4 virginica
-4.9 3.6 1.4 0.1 setosa
-4.9 2.5 4.5 1.7 virginica
-5.8 2.8 5.1 2.4 virginica
-4.5 2.3 1.3 0.3 setosa
-6.7 3 5.2 2.3 virginica
-6 3 4.8 1.8 virginica
-6.4 3.2 4.5 1.5 versicolor
-7.7 3.8 6.7 2.2 virginica
-5.7 2.6 3.5 1 versicolor
-5.7 2.9 4.2 1.3 versicolor
-5.4 3.9 1.7 0.4 setosa
-5 3.5 1.3 0.3 setosa
-6 2.2 5 1.5 virginica
-6.6 2.9 4.6 1.3 versicolor
-5.9 3 5.1 1.8 virginica
-4.7 3.2 1.6 0.2 setosa
-5 3.4 1.5 0.2 setosa
-6.9 3.2 5.7 2.3 virginica
-6.5 3 5.8 2.2 virginica
-5.5 4.2 1.4 0.2 setosa
-7.1 3 5.9 2.1 virginica
-6.2 2.9 4.3 1.3 versicolor
-5.7 4.4 1.5 0.4 setosa
-4.9 2.4 3.3 1 versicolor
-5.4 3.9 1.3 0.4 setosa
-6.4 2.8 5.6 2.1 virginica
-6.9 3.1 4.9 1.5 versicolor
-5.9 3.2 4.8 1.8 versicolor
-6.1 3 4.9 1.8 virginica
-5.6 3 4.5 1.5 versicolor
-5.3 3.7 1.5 0.2 setosa
-6.4 2.9 4.3 1.3 versicolor
-6.6 3 4.4 1.4 versicolor
-6.4 3.1 5.5 1.8 virginica
-6.3 2.5 4.9 1.5 versicolor
-6.7 3.3 5.7 2.5 virginica
-6.4 2.7 5.3 1.9 virginica
-6.5 3 5.5 1.8 virginica
-6.7 3.1 4.4 1.4 versicolor
-4.9 3 1.4 0.2 setosa
-6.2 2.8 4.8 1.8 virginica
-7.6 3 6.6 2.1 virginica
-5.2 3.5 1.5 0.2 setosa
-7.7 2.6 6.9 2.3 virginica
-6.5 3.2 5.1 2 virginica
-5.8 2.6 4 1.2 versicolor
-6.3 2.9 5.6 1.8 virginica
-5.8 4 1.2 0.2 setosa
-6.5 2.8 4.6 1.5 versicolor
-5 3.2 1.2 0.2 setosa
-7.4 2.8 6.1 1.9 virginica
-7 3.2 4.7 1.4 versicolor
-4.8 3.4 1.6 0.2 setosa
-6.4 2.8 5.6 2.2 virginica
-5.4 3.7 1.5 0.2 setosa
-5.7 3.8 1.7 0.3 setosa
-5.2 2.7 3.9 1.4 versicolor
-6.3 3.3 6 2.5 virginica
-5.1 2.5 3 1.1 versicolor
-7.7 2.8 6.7 2 virginica
-5.5 2.4 3.7 1 versicolor
-7.3 2.9 6.3 1.8 virginica
-4.3 3 1.1 0.1 setosa
-5.6 2.7 4.2 1.3 versicolor
-7.9 3.8 6.4 2 virginica
-5.6 2.8 4.9 2 virginica
-5 3 1.6 0.2 setosa
-6.1 3 4.6 1.4 versicolor
-5.1 3.5 1.4 0.2 setosa
-5.1 3.4 1.5 0.2 setosa
-4.8 3.4 1.9 0.2 setosa
-4.6 3.4 1.4 0.3 setosa
-6.5 3 5.2 2 virginica
-5.1 3.8 1.9 0.4 setosa
-5.6 2.9 3.6 1.3 versicolor
-6.7 3 5 1.7 versicolor
-7.2 3.6 6.1 2.5 virginica
-4.9 3.1 1.5 0.1 setosa
-6.2 3.4 5.4 2.3 virginica
-5.8 2.7 5.1 1.9 virginica
-5.6 2.5 3.9 1.1 versicolor
-6.3 2.5 5 1.9 virginica
-4.6 3.2 1.4 0.2 setosa
-5.8 2.7 5.1 1.9 virginica
-5.5 2.3 4 1.3 versicolor
-6.7 3.3 5.7 2.1 virginica
-6 2.2 4 1 versicolor
-5.1 3.7 1.5 0.4 setosa
-5 3.5 1.6 0.6 setosa
-5.8 2.7 4.1 1 versicolor
-5.7 2.8 4.1 1.3 versicolor
-6 3.4 4.5 1.6 versicolor
-6.1 2.9 4.7 1.4 versicolor
-5.2 3.4 1.4 0.2 setosa
-5.6 3 4.1 1.3 versicolor
-6.8 2.8 4.8 1.4 versicolor
-5.8 2.7 3.9 1.2 versicolor
-4.8 3.1 1.6 0.2 setosa
-5.1 3.5 1.4 0.3 setosa
-5 3.3 1.4 0.2 setosa
-7.2 3.2 6 1.8 virginica
-6.7 3.1 4.7 1.5 versicolor
-6.8 3.2 5.9 2.3 virginica
-4.6 3.6 1 0.2 setosa
-5.4 3 4.5 1.5 versicolor
-6.4 3.2 5.3 2.3 virginica
-4.8 3 1.4 0.1 setosa
-5.5 2.5 4 1.3 versicolor
-6.9 3.1 5.4 2.1 virginica
-6.8 3 5.5 2.1 virginica
-4.8 3 1.4 0.3 setosa
-6.3 2.7 4.9 1.8 virginica
-6 2.7 5.1 1.6 versicolor
-6.1 2.6 5.6 1.4 virginica
-7.2 3 5.8 1.6 virginica
-5 2.3 3.3 1 versicolor
-6.3 3.3 4.7 1.6 versicolor
-6.3 2.8 5.1 1.5 virginica
-4.7 3.2 1.3 0.2 setosa
-6.3 3.4 5.6 2.4 virginica
-5.7 2.8 4.5 1.3 versicolor
-5.7 3 4.2 1.2 versicolor
-4.9 3.1 1.5 0.2 setosa
-5 2 3.5 1 versicolor
-7.7 3 6.1 2.3 virginica
-5.7 2.5 5 2 virginica
-5.5 2.4 3.8 1.1 versicolor
-5.1 3.3 1.7 0.5 setosa
-6.3 2.3 4.4 1.3 versicolor
-4.4 3 1.3 0.2 setosa
-6.1 2.8 4 1.3 versicolor
-6.1 2.8 4.7 1.2 versicolor
-5.5 2.6 4.4 1.2 versicolor
-5.1 3.8 1.5 0.3 setosa
-4.4 3.2 1.3 0.2 setosa
-5 3.4 1.6 0.4 setosa
-6.7 2.5 5.8 1.8 virginica
-5.2 4.1 1.5 0.1 setosa
-6.2 2.2 4.5 1.5 versicolor
-5.4 3.4 1.7 0.2 setosa
diff --git builtins/test/onerow.txt builtins/test/onerow.txt
deleted file mode 100644
index 857d7a0..0000000
--- builtins/test/onerow.txt
+++ /dev/null
@@ -1 +0,0 @@
-plugh
diff --git builtins/test/setup.sql builtins/test/setup.sql
deleted file mode 100644
index 6aa93ba..0000000
--- builtins/test/setup.sql
+++ /dev/null
@@ -1,10 +0,0 @@
-create table onerow(s string);
-load data local inpath '${env:HIVE_PLUGIN_ROOT_DIR}/test/onerow.txt'
-overwrite into table onerow;
-create table iris(
-sepal_length string, sepal_width string,
-petal_length string, petal_width string,
-species string)
-row format delimited fields terminated by '\t' stored as textfile;
-load data local inpath '${env:HIVE_PLUGIN_ROOT_DIR}/test/iris.txt'
-overwrite into table iris;
diff --git pdk/build.xml pdk/build.xml
deleted file mode 100644
index 0893f47..0000000
--- pdk/build.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[66 deleted XML lines; content not recoverable from the extracted text.]
diff --git pdk/ivy.xml pdk/ivy.xml
deleted file mode 100644
index 2b101c8..0000000
--- pdk/ivy.xml
+++ /dev/null
@@ -1,43 +0,0 @@
[43 deleted XML lines; only the module description text survives:]
-    The Apache Hive (TM) data warehouse software facilitates querying and managing large datasets residing in distributed storage.
-    https://cwiki.apache.org/confluence/display/Hive/Home
diff --git pdk/scripts/README pdk/scripts/README
deleted file mode 100644
index 289368e..0000000
--- pdk/scripts/README
+++ /dev/null
@@ -1,3 +0,0 @@
-Note that this directory contains scripts which are bundled into the
-Plugin Development Kit (rather than used as part of the Hive build
-itself).
diff --git pdk/scripts/build-plugin.xml pdk/scripts/build-plugin.xml
deleted file mode 100644
index a4f0640..0000000
--- pdk/scripts/build-plugin.xml
+++ /dev/null
@@ -1,159 +0,0 @@
[159 deleted XML lines (the ant harness shipped with the PDK); content not recoverable from the extracted text.]
diff --git pdk/scripts/class-registration.xsl pdk/scripts/class-registration.xsl
deleted file mode 100644
index ce2ea95..0000000
--- pdk/scripts/class-registration.xsl
+++ /dev/null
@@ -1,41 +0,0 @@
[41 deleted XSL lines; only the emitted SQL text nodes survive:]
- CREATE TEMPORARY FUNCTION
- AS '
- ';
diff --git pdk/scripts/conf/hive-site.xml pdk/scripts/conf/hive-site.xml
deleted file mode 100644
index dab494e..0000000
--- pdk/scripts/conf/hive-site.xml
+++ /dev/null
@@ -1,22 +0,0 @@
[22 deleted XML lines; content not recoverable from the extracted text.]
diff --git pdk/scripts/conf/log4j.properties pdk/scripts/conf/log4j.properties
deleted file mode 100644
index a4771b7..0000000
--- pdk/scripts/conf/log4j.properties
+++ /dev/null
@@ -1,82 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This file is to configure hadoop log4j
-# Define some default values that can be overridden by system properties
-hadoop.root.logger=DEBUG,DRFA
-hadoop.log.dir=/tmp/
-hadoop.log.file=hadoop.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-# Null Appender
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA
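
Note: the pdk/scripts files above formed the PDK's registration pipeline: FunctionExtractor (next file) scanned plugin classes for @Description annotations and emitted a class catalog, which class-registration.xsl turned into a class-registration.sql script that the harness sourced via "hive -i" (see PluginTest.PluginGlobalSetup below). The generated script is not recoverable here, but from the surviving XSL text nodes and the bundled Rot13 test plugin it presumably looked like:

    -- hypothetical class-registration.sql output for the test plugin
    CREATE TEMPORARY FUNCTION tp_rot13 AS 'org.apache.hive.pdktest.Rot13';
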
diff --git pdk/src/java/org/apache/hive/pdk/FunctionExtractor.java pdk/src/java/org/apache/hive/pdk/FunctionExtractor.java
deleted file mode 100644
index d22c13f..0000000
--- pdk/src/java/org/apache/hive/pdk/FunctionExtractor.java
+++ /dev/null
@@ -1,39 +0,0 @@
[Note: the markup inside this file's string literals was eaten by extraction; the empty strings below originally contained XML tags.]
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.pdk;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-
-public class FunctionExtractor {
-  public static void main(String [] args) throws Exception {
-    System.out.println("");
-    for (String arg : args) {
-      Class c = Class.forName(arg);
-      Description d = c.getAnnotation(Description.class);
-      if (d == null) {
-        continue;
-      }
-      System.out.print(" ");
-    }
-    System.out.println("");
-  }
-}
diff --git pdk/src/java/org/apache/hive/pdk/HivePdkUnitTest.java pdk/src/java/org/apache/hive/pdk/HivePdkUnitTest.java
deleted file mode 100644
index 34c6b72..0000000
--- pdk/src/java/org/apache/hive/pdk/HivePdkUnitTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.pdk;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-@Retention(RetentionPolicy.RUNTIME)
-/**
- * Defines one Hive plugin unit test.
- */
-public @interface HivePdkUnitTest {
-
-  /**
-   * Hive query to run for this test.
-   */
-  String query();
-
-  /**
-   * Expected result from query.
-   */
-  String result();
-}
diff --git pdk/src/java/org/apache/hive/pdk/HivePdkUnitTests.java pdk/src/java/org/apache/hive/pdk/HivePdkUnitTests.java
deleted file mode 100644
index d85f8c7..0000000
--- pdk/src/java/org/apache/hive/pdk/HivePdkUnitTests.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.pdk;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.TYPE)
-/**
- * Defines a suite of Hive plugin unit tests.
- */
-public @interface HivePdkUnitTests {
-
-  /**
-   * Hive commands (semicolon-separated) to run as suite cleanup.
-   */
-  String cleanup() default "";
-
-  /**
-   * Hive commands (semicolon-separated) to run as suite setup.
-   */
-  String setup() default "";
-
-  /**
-   * Hive plugin unit tests in this suite.
-   */
-  HivePdkUnitTest[] cases();
-}
diff --git pdk/src/java/org/apache/hive/pdk/PluginTest.java pdk/src/java/org/apache/hive/pdk/PluginTest.java
deleted file mode 100644
index b475eb4..0000000
--- pdk/src/java/org/apache/hive/pdk/PluginTest.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.pdk;
-
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileReader;
-import java.io.InputStreamReader;
-import java.io.IOException;
-import java.io.PrintStream;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import junit.extensions.TestSetup;
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
-import org.apache.hadoop.hive.cli.CliDriver;
-
-/**
- * PluginTest is a test harness for invoking all of the unit tests
- * annotated on the classes in a plugin.
- */
-public class PluginTest extends TestCase {
-
-  private HivePdkUnitTest unitTest;
-
-  private PluginTest(HivePdkUnitTest unitTest) {
-    super(unitTest.query());
-    this.unitTest = unitTest;
-  }
-
-  public void runTest() throws Exception {
-    String output = runHive(
-      "-i",
-      "metadata/add-jar.sql",
-      "-e",
-      unitTest.query());
-    assertEquals(unitTest.result(), output);
-  }
-
-  static String runHive(String ... args) throws Exception {
-    ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
-    ByteArrayOutputStream errBytes = new ByteArrayOutputStream();
-    PrintStream outSaved = System.out;
-    PrintStream errSaved = System.err;
-    System.setOut(new PrintStream(outBytes, true));
-    System.setErr(new PrintStream(errBytes, true));
-    try {
-      CliDriver.run(args);
-    } finally {
-      System.setOut(outSaved);
-      System.setErr(errSaved);
-    }
-    ByteArrayInputStream outBytesIn =
-      new ByteArrayInputStream(outBytes.toByteArray());
-    ByteArrayInputStream errBytesIn =
-      new ByteArrayInputStream(errBytes.toByteArray());
-    BufferedReader is =
-      new BufferedReader(new InputStreamReader(outBytesIn));
-    BufferedReader es =
-      new BufferedReader(new InputStreamReader(errBytesIn));
-    StringBuilder output = new StringBuilder();
-    String line;
-    while ((line = is.readLine()) != null) {
-      if (output.length() > 0) {
-        output.append("\n");
-      }
-      output.append(line);
-    }
-    if (output.length() == 0) {
-      output = new StringBuilder();
-      while ((line = es.readLine()) != null) {
-        output.append("\n");
-        output.append(line);
-      }
-    }
-    return output.toString();
-  }
-
-  public static Test suite() throws Exception {
-    String classList = System.getProperty("hive.plugin.class.list");
-    String [] classNames = classList.split(" ");
-    TestSuite suite = new TestSuite("Plugin Tests");
-    for (String className : classNames) {
-      Class c = Class.forName(className);
-      HivePdkUnitTests tests = c.getAnnotation(HivePdkUnitTests.class);
-      if (tests == null) {
-        continue;
-      }
-      TestSuite classSuite = new TestSuite(c.getName());
-      for (HivePdkUnitTest unitTest : tests.cases()) {
-        classSuite.addTest(new PluginTest(unitTest));
-      }
-      suite.addTest(new PluginTestSetup(classSuite, tests));
-    }
-
-    return new PluginGlobalSetup(suite);
-  }
-
-  public static void main(String [] args) throws Exception {
-    junit.textui.TestRunner.run(suite());
-  }
-
-  public static class PluginTestSetup extends TestSetup {
-    String name;
-    HivePdkUnitTests unitTests;
-
-    PluginTestSetup(TestSuite test, HivePdkUnitTests unitTests) {
-      super(test);
-      this.name = test.getName();
-      this.unitTests = unitTests;
-    }
-
-    protected void setUp() throws Exception {
-      String cleanup = unitTests.cleanup();
-      String setup = unitTests.setup();
-      if (cleanup == null) {
-        cleanup = "";
-      }
-      if (setup == null) {
-        setup = "";
-      }
-      if ((cleanup.length() > 0) || (setup.length() > 0)) {
-        String result = runHive(
-          "-e",
-          cleanup + "\n" + setup);
-        if (result.length() > 0) {
-          System.err.println(name + " SETUP: " + result);
-        }
-      }
-    }
-
-    protected void tearDown() throws Exception {
-      String cleanup = unitTests.cleanup();
-      if (cleanup != null) {
-        String result = runHive(
-          "-e",
-          cleanup);
-        if (result.length() > 0) {
-          System.err.println(name + " TEARDOWN: " + result);
-        }
-      }
-    }
-  }
-
-  public static class PluginGlobalSetup extends TestSetup {
-    private File testScriptDir;
-
-    PluginGlobalSetup(Test test) {
-      super(test);
-      testScriptDir =
-        new File(System.getProperty("hive.plugin.root.dir"), "test");
-    }
-
-    protected void setUp() throws Exception {
-      String result = runHive(
-        "-i",
-        new File(testScriptDir, "cleanup.sql").toString(),
-        "-i",
-        "metadata/add-jar.sql",
-        "-i",
-        "metadata/class-registration.sql",
-        "-f",
-        new File(testScriptDir, "setup.sql").toString());
-      if (result.length() > 0) {
-        System.err.println("GLOBAL SETUP: " + result);
-      }
-    }
-
-    protected void tearDown() throws Exception {
-      String result = runHive(
-        "-f",
-        new File(testScriptDir, "cleanup.sql").toString());
-      if (result.length() > 0) {
-        System.err.println("GLOBAL TEARDOWN: " + result);
-      }
-    }
-  }
-}
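
Note: PluginTest discovers its cases through two system properties, read in suite() and PluginGlobalSetup above. A sketch of a standalone invocation, assuming Hive and the plugin jar are already on the classpath (property values illustrative):

    public class RunPluginTests {
      public static void main(String[] args) throws Exception {
        // Space-separated plugin classes to scan for @HivePdkUnitTests:
        System.setProperty("hive.plugin.class.list",
            "org.apache.hive.pdktest.Rot13");
        // Directory whose test/ subdirectory holds setup.sql and cleanup.sql:
        System.setProperty("hive.plugin.root.dir", "/path/to/test-plugin");
        junit.textui.TestRunner.run(org.apache.hive.pdk.PluginTest.suite());
      }
    }
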
"cleanup.sql").toString()); - if (result.length() > 0) { - System.err.println("GLOBAL TEARDOWN: " + result); - } - } - } -} diff --git pdk/test-plugin/build.xml pdk/test-plugin/build.xml deleted file mode 100644 index 117d2cd..0000000 --- pdk/test-plugin/build.xml +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - - - - - diff --git pdk/test-plugin/src/org/apache/hive/pdktest/Rot13.java pdk/test-plugin/src/org/apache/hive/pdktest/Rot13.java deleted file mode 100644 index 05aea66..0000000 --- pdk/test-plugin/src/org/apache/hive/pdktest/Rot13.java +++ /dev/null @@ -1,71 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hive.pdktest; - -import org.apache.hive.pdk.HivePdkUnitTest; -import org.apache.hive.pdk.HivePdkUnitTests; - -import org.apache.hadoop.hive.ql.exec.Description; -import org.apache.hadoop.hive.ql.exec.UDF; -import org.apache.hadoop.io.Text; - -/** - * Example UDF for rot13 transformation. - */ -@Description(name = "rot13", - value = "_FUNC_(str) - Returns str with all characters transposed via rot13", - extended = "Example:\n" - + " > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n" + " 'Snprobbx'") -@HivePdkUnitTests( - setup = "create table rot13_data(s string); " - + "insert overwrite table rot13_data select 'Facebook' from onerow;", - cleanup = "drop table if exists rot13_data;", - cases = { - @HivePdkUnitTest( - query = "SELECT tp_rot13('Mixed Up!') FROM onerow;", - result = "Zvkrq Hc!"), - @HivePdkUnitTest( - query = "SELECT tp_rot13(s) FROM rot13_data;", - result = "Snprobbx") - } - ) -public class Rot13 extends UDF { - private Text t = new Text(); - - public Rot13() { - } - - public Text evaluate(Text s) { - StringBuilder out = new StringBuilder(s.getLength()); - char[] ca = s.toString().toCharArray(); - for (char c : ca) { - if (c >= 'a' && c <= 'm') { - c += 13; - } else if (c >= 'n' && c <= 'z') { - c -= 13; - } else if (c >= 'A' && c <= 'M') { - c += 13; - } else if (c >= 'N' && c <= 'Z') { - c -= 13; - } - out.append(c); - } - t.set(out.toString()); - return t; - } -} diff --git pdk/test-plugin/test/cleanup.sql pdk/test-plugin/test/cleanup.sql deleted file mode 100644 index 087a3ba..0000000 --- pdk/test-plugin/test/cleanup.sql +++ /dev/null @@ -1 +0,0 @@ -drop table if exists onerow; diff --git pdk/test-plugin/test/onerow.txt pdk/test-plugin/test/onerow.txt deleted file mode 100644 index 857d7a0..0000000 --- pdk/test-plugin/test/onerow.txt +++ /dev/null @@ -1 +0,0 @@ -plugh diff --git pdk/test-plugin/test/setup.sql pdk/test-plugin/test/setup.sql deleted file mode 100644 index a199511..0000000 --- pdk/test-plugin/test/setup.sql +++ /dev/null @@ -1,3 +0,0 @@ -create table onerow(s string); -load data local inpath '${env:HIVE_PLUGIN_ROOT_DIR}/test/onerow.txt' -overwrite into table onerow; diff 
diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index bcd4d4d..d8c91bd 100644
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -191,19 +191,6 @@ public SessionState(HiveConf conf) {
     ls = new LineageState();
     overriddenConfigurations = new HashMap();
     overriddenConfigurations.putAll(HiveConf.getConfSystemProperties());
-
-    // Register the Hive builtins jar and all of its functions
-    try {
-      Class pluginClass = Utilities.getBuiltinUtilsClass();
-      URL jarLocation = pluginClass.getProtectionDomain().getCodeSource()
-          .getLocation();
-      add_builtin_resource(
-          ResourceType.JAR, jarLocation.toString());
-      FunctionRegistry.registerFunctionsFromPluginJar(
-          jarLocation, pluginClass.getClassLoader());
-    } catch (Exception ex) {
-      throw new RuntimeException("Failed to load Hive builtin functions", ex);
-    }
   }
 
   public void setCmd(String cmdString) {
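
Note: with this constructor block removed, a Hive session no longer auto-registers a builtins jar at startup, and the pdk and builtins modules are gone from the build entirely. Functions shipped in external jars are instead registered per session with plain HiveQL, e.g. (jar path hypothetical):

    ADD JAR /path/to/my-plugin.jar;
    CREATE TEMPORARY FUNCTION rot13 AS 'org.apache.hive.pdktest.Rot13';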