diff --git hcatalog/src/test/e2e/templeton/README.txt hcatalog/src/test/e2e/templeton/README.txt
index 785a073..bf2843c 100644
--- hcatalog/src/test/e2e/templeton/README.txt
+++ hcatalog/src/test/e2e/templeton/README.txt
@@ -19,11 +19,13 @@ End to end tests
---------------
End to end tests in templeton run against an existing templeton server.
They run hcat, mapreduce, streaming, hive and pig tests.
+This requires a running Hadoop cluster and Hive metastore.
It's a good idea to look at current versions of
-http://hive.apache.org/docs/hcat_r0.5.0/rest_server_install.html and
-http://hive.apache.org/docs/hcat_r0.5.0/configuration.html before proceeding.
+https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat and
+https://cwiki.apache.org/confluence/display/Hive/WebHCat+Configure before proceeding.
+See deployers/README.txt for help automating some of the steps in this document.
(Note that by default, webhcat-default.xml templeton.hive.properties sets
hive.metastore.uris=thrift://localhost:9933, thus WebHCat will expect
@@ -39,6 +41,11 @@ to control which DB the metastore uses put something like
Controls which DB engine metastore will use for persistence. In particular,
where Derby will create its data files.
+
+  <property>
+    <name>hive.metastore.uris</name>
+    <value>thrift://localhost:9933</value>
+    <description>For Hive CLI to connect to</description>
+  </property>
+
in hive-site.xml
)
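+(For reference, the deployer scripts in deployers/ start the metastore on
+this non-default port roughly like so; HIVE_HOME is assumed to point at your
+Hive install, and deployers/start_hive_services.sh has the exact command:
+  nohup ${HIVE_HOME}/bin/hive --service metastore -p 9933 > metastore_console.log 2>&1 &
+)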
@@ -91,13 +98,36 @@ Tips:
be obtained from Pig and the other two are obtained from your Hadoop distribution.
For Hadoop 1.x you would need to upload hadoop-examples.jar to HDFS twice, once as hclient.jar and once as hexamples.jar.
For Hadoop 2.x you would need to upload hadoop-mapreduce-client-jobclient.jar to HDFS as hclient.jar and hadoop-mapreduce-examples.jar to HDFS as hexamples.jar.
-Also see http://hive.apache.org/docs/hcat_r0.5.0/rest_server_install.html#Hadoop+Distributed+Cache for notes on
-additional JAR files to copy to HDFS.
+Also see https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat#WebHCatInstallWebHCat-HadoopDistributedCache
+for notes on additional JAR files to copy to HDFS.
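+For example, on Hadoop 2.x the uploads from the previous step might look like
+this (jar locations and version suffixes vary by distribution; these paths
+are illustrative):
+  hdfs dfs -put $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-<version>.jar webhcate2e/hclient.jar
+  hdfs dfs -put $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-<version>.jar webhcate2e/hexamples.jar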
5. Make sure the TEMPLETON_HOME environment variable is set
6. hadoop/conf/core-site.xml should have items described in
-http://hive.apache.org/docs/hcat_r0.5.0/rest_server_install.html#Permissions
+https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat#WebHCatInstallWebHCat-Permissions
+
+7. Currently the Pig tar file available on http://pig.apache.org/ contains jar files compiled to work with Hadoop 1.x.
+To run WebHCat tests on Hadoop 2.x you need to build your own Pig tar for Hadoop 2. To do that, download the
+Pig source distribution and build it with "ant -Dforrest.home=$FORREST_HOME -Dhadoopversion=23 clean tar".
+You may also need to adjust the following in Pig's build.xml; a build sketch follows below:
+
+
+
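+A sketch of that build flow (version numbers are examples only):
+  # unpack the Pig source release, then build a Hadoop-2 compatible tar
+  tar -xzf pig-0.12.0-src.tar.gz && cd pig-0.12.0-src
+  ant -Dforrest.home=$FORREST_HOME -Dhadoopversion=23 clean tar
+  # the resulting tar is produced under build/
+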
+8. Enable webhdfs by adding the following to your hadoop hdfs-site.xml:
+  <property>
+    <name>dfs.webhdfs.enabled</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>dfs.http.address</name>
+    <value>127.0.0.1:8085</value>
+    <final>true</final>
+  </property>
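+
+Once HDFS is restarted, webhdfs can be sanity-checked with a REST call to the
+address configured above:
+  curl -s "http://127.0.0.1:8085/webhdfs/v1/?op=LISTSTATUS"
+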
+****
+**** See deployers/ for scripts that automate a lot of the set up.
+****
+
Running the tests
-----------------
@@ -174,25 +204,6 @@ and the folder hdfs://hostname:8020/sqoopoutputdir doesn't exist before running
Notes
-----
-
-
-
-Enable webhdfs by adding the following to your hadoop hdfs-site.xml :
-
-<property>
-  <name>dfs.webhdfs.enabled</name>
-  <value>true</value>
-</property>
-<property>
-  <name>dfs.http.address</name>
-  <value>127.0.0.1:8085</value>
-  <final>true</final>
-</property>
-
-You can build a server that will measure test coverage by using templeton:
-ant clean; ant e2e
-This assumes you've got webhdfs at the address above, the inpdir info in /user/templeton, and templeton running on the default port. You can change any of those properties in the build file.
-
It's best to set HADOOP_HOME_WARN_SUPPRESS=true everywhere you can.
Also useful to add to conf/hadoop-env.sh
export HADOOP_OPTS="-Djava.security.krb5.realm=OX.AC.UK -Djava.security.krb5.kdc=kdc0.ox.ac.uk:kdc1.ox.ac.uk"
diff --git hcatalog/src/test/e2e/templeton/build.xml hcatalog/src/test/e2e/templeton/build.xml
index 6d6244b..c8eb94d 100644
--- hcatalog/src/test/e2e/templeton/build.xml
+++ hcatalog/src/test/e2e/templeton/build.xml
@@ -87,8 +87,8 @@
if group=3, then 3 .conf files will be processed in parallel
if conf.file=2 there will be 2 threads per .conf file, each thread
executing a single group (identified by 'name' element) -->
-
-
+
+
diff --git hcatalog/src/test/e2e/templeton/deployers/README.txt hcatalog/src/test/e2e/templeton/deployers/README.txt
new file mode 100644
index 0000000..67341aa
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/README.txt
@@ -0,0 +1,55 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+Overview
+This directory contains a set of scripts that make running WebHCat e2e tests easier. These scripts
+help ensure that all the necessary artifacts for e2e tests are deployed to the cluster and speed up
+the code-compile-test loop in Hive/WebHCat.
+
+
+Assumptions
+It is assumed that you have a properly set up Hadoop2 cluster running.
+
+High level workflow
+1. Build Hive (e.g. mvn clean package -Phadoop-2,dist -DskipTests)
+2. Define variables in env.sh. This should be the only file you need to change.
+3. Run restart_hive_redeploy_artifacts.sh, which will
+ a. Stop Hive Metastore, WebHCat server
+   b. Delete dirs in HDFS which may be left over from previous runs. Currently these scripts
+      assume a cluster dedicated to WebHCat e2e tests, so be sure to check what this will delete
+      if the cluster is used for anything else.
+ c. Copy hive-site.xml and webhcat-site.xml under HIVE_HOME with minimal config needed to start
+ the services and run e2e tests.
+   d. Start the Hive Metastore and WebHCat servers.
+ e. Copy various artifacts to HDFS as explained in e2e/templeton/README.txt.
+4. Now you can run the test command as explained in e2e/templeton/README.txt.
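+
+A typical session might therefore look like this (paths are examples; use
+whatever you set in env.sh):
+   cd /path/to/hive-source
+   mvn clean package -Phadoop-2,dist -DskipTests
+   cd hcatalog/src/test/e2e/templeton/deployers
+   # edit env.sh first, then:
+   ./restart_hive_redeploy_artifacts.sh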
+
+
+If you would like to make this better (in no particular order):
+1. env.sh is sourced from all other scripts but only works if the 'other' script is called from
+   deployers/.
+2. Send 'derby.log' somewhere under /tmp/.
+3. Some tests (e.g. Sqoop) require an RDBMS set up with resources pre-created. See if this can
+   be automated (at least truncating the table between runs).
+4. Make the same scripts work on Windows (without making a copy of each .sh file if at all possible).
+5. Configuring a working (even pseudo-distributed) Hadoop-2 cluster takes some knowledge. It may be
+   worth scripting the steps of taking a Hadoop binary tar file, exploding it, and copying a few
+   pre-canned config files (mapred-site, yarn-site, etc.) into it so that anyone can easily set up
+   a test env.
+6. Make this set of scripts work with Hadoop-1 (should not take much effort, if any).
diff --git hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh
new file mode 100755
index 0000000..be3bbf5
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+
+
+# This script deletes things from DFS that may have been left over from previous runs of e2e
+# tests to make sure next run starts with a clean slate.
+
+. ./env.sh
+
+echo "Deleting artifacts from HDFS..."
+
+${HADOOP_HOME}/bin/hdfs dfs -rm -r /user/hive/ /user/${USER}/ /user/templeton /apps /tmp /sqoopoutputdir
+${HADOOP_HOME}/bin/hdfs dfs -mkdir -p /tmp/hadoop-${USER} /user/hive/warehouse /user/${USER}/ /user/templeton /apps/templeton /tmp/hadoop-yarn /tmp/templeton_test_out
+${HADOOP_HOME}/bin/hdfs dfs -chmod -R a+rwx /user /tmp/
+${HADOOP_HOME}/bin/hdfs dfs -chmod g+rwx /user/hive/warehouse
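+
+# Optional preview, not part of the original flow: to see what the next run
+# would delete, list the relevant dirs first, e.g.
+#   ${HADOOP_HOME}/bin/hdfs dfs -ls -R /user/templeton /apps/templeton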
diff --git hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml
new file mode 100644
index 0000000..113ea2b
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml
@@ -0,0 +1,123 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<configuration>
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:sqlserver://172.16.65.152:1433;databaseName=master</value>
+  </property>
+  <property>
+    <name>javax.jdo.option.ConnectionUserName</name>
+    <value>hive</value>
+  </property>
+  <property>
+    <name>javax.jdo.option.ConnectionPassword</name>
+    <value>hive</value>
+  </property>
+  <property>
+    <name>javax.jdo.option.ConnectionDriverName</name>
+    <value>com.microsoft.sqlserver.jdbc.SQLServerDriver</value>
+  </property>
+  <property>
+    <name>datanucleus.autoCreateSchema</name>
+    <value>false</value>
+  </property>
+</configuration>
diff --git hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml
new file mode 100644
index 0000000..bebf6a6
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml
@@ -0,0 +1,87 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<configuration>
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=/tmp/webhcat_e2e/logs/webhcat_e2e_metastore_db;create=true</value>
+  </property>
+  <property>
+    <name>hive.metastore.uris</name>
+    <value>thrift://localhost:9933</value>
+    <description>For Hive CLI to connect to</description>
+  </property>
+</configuration>
diff --git hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
new file mode 100644
index 0000000..00c660c
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
@@ -0,0 +1,134 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<configuration>
+
+  <property>
+    <name>templeton.hcat</name>
+    <value>${env.HCAT_PREFIX}/bin/hcat</value>
+    <description>The path to the hcatalog executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.libjars</name>
+    <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.5.jar</value>
+    <description>Jars to add to the classpath.</description>
+  </property>
+
+  <property>
+    <name>templeton.pig.archive</name>
+    <value>hdfs:///apps/templeton/pig-${env.PIG_VERSION}.tar.gz</value>
+    <description>The path to the Pig archive.</description>
+  </property>
+
+  <property>
+    <name>templeton.pig.path</name>
+    <value>pig-${env.PIG_VERSION}.tar.gz/pig-${env.PIG_VERSION}/bin/pig</value>
+    <description>The path to the Pig executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.archive</name>
+    <value>hdfs:///apps/templeton/apache-hive-${env.HIVE_VERSION}-bin.tar.gz</value>
+    <description>The path to the Hive archive.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.path</name>
+    <value>apache-hive-${env.HIVE_VERSION}-bin.tar.gz/apache-hive-${env.HIVE_VERSION}-bin/bin/hive</value>
+    <description>The path to the Hive executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.home</name>
+    <value>apache-hive-${env.HIVE_VERSION}-bin.tar.gz/apache-hive-${env.HIVE_VERSION}-bin</value>
+    <description>The path to the Hive home within the tar. This is needed if
+      Hive is not installed on all nodes in the cluster and needs to be
+      shipped to the target node in the cluster to execute Pig job which uses
+      HCat, Hive query, etc.</description>
+  </property>
+
+  <property>
+    <name>templeton.hcat.home</name>
+    <value>apache-hive-${env.HIVE_VERSION}-bin.tar.gz/apache-hive-${env.HIVE_VERSION}-bin/hcatalog</value>
+    <description>The path to the HCat home within the tar. This is needed if
+      Hive is not installed on all nodes in the cluster and needs to be
+      shipped to the target node in the cluster to execute Pig job which uses
+      HCat, Hive query, etc.</description>
+  </property>
+
+  <property>
+    <name>templeton.controller.mr.child.opts</name>
+    <value>-Xmx64m -Djava.net.preferIPv4Stack=true</value>
+  </property>
+
+  <property>
+    <name>templeton.hive.properties</name>
+    <value>hive.metastore.uris=thrift://localhost:9933,hive.metastore.sasl.enabled=false</value>
+  </property>
+
+</configuration>
diff --git hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
new file mode 100755
index 0000000..29c3f3e
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+
+#This script copies files needed by e2e tests to DFS
+
+source ./env.sh
+
+echo "Deploying artifacts to HDFS..."
+
+${HADOOP_HOME}/bin/hdfs dfs -put ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/inpdir/ webhcate2e
+#For hadoop1 we copy the same file with 2 names
+#$HADOOP_HOME/bin/hadoop fs -put hadoop-examples-1.2.1.jar webhcate2e/hexamples.jar
+#$HADOOP_HOME/bin/hadoop fs -put hadoop-examples-1.2.1.jar webhcate2e/hclient.jar
+
+#For hadoop2 there are 2 separate jars
+${HADOOP_HOME}/bin/hdfs dfs -put ${HADOOP_HOME}/share/hadoop/mapreduce/hadoop-mapreduce-examples-${HADOOP_VERSION}.jar webhcate2e/hexamples.jar
+${HADOOP_HOME}/bin/hdfs dfs -put ${HADOOP_HOME}/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-${HADOOP_VERSION}.jar webhcate2e/hclient.jar
+${HADOOP_HOME}/bin/hdfs dfs -put ${HADOOP_HOME}/share/hadoop/tools/lib/hadoop-streaming-${HADOOP_VERSION}.jar /user/templeton/hadoop-streaming.jar
+
+
+#must match config/webhcat/webhcat-site.xml
+${HADOOP_HOME}/bin/hdfs dfs -put ${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin.tar.gz /apps/templeton/apache-hive-${HIVE_VERSION}-bin.tar.gz
+# To run against a Hadoop2 cluster, you have to build the Pig tar yourself with
+# "ant -Dforrest.home=$FORREST_HOME -Dhadoopversion=23 clean tar"
+${HADOOP_HOME}/bin/hadoop fs -put ${PIG_TAR_PATH}/pig-${PIG_VERSION}.tar.gz /apps/templeton/pig-${PIG_VERSION}.tar.gz
+${HADOOP_HOME}/bin/hadoop fs -put ${PIG_PIGGYBANK_PATH} webhcate2e/
+#standard Pig distro from ASF for Hadoop 1
+# ${HADOOP_HOME}/bin/hadoop fs -put /Users/ekoifman/dev/data/jarsForTmplte2e/pig-0.12.0.tar.gz /apps/templeton/pig-0.12.0.tar.gz
+#${HADOOP_HOME}/bin/hadoop fs -put /Users/ekoifman/dev/data/jarsForTmplte2e/pig-0.12.0/contrib/piggybank/java/piggybank.jar webhcate2e/
+
+
+${HADOOP_HOME}/bin/hadoop fs -put ${HIVE_HOME}/lib/zookeeper-3.4.5.jar /apps/templeton/zookeeper-3.4.5.jar
+
+#check what got deployed
+${HADOOP_HOME}/bin/hdfs dfs -ls /apps/templeton webhcate2e /user/templeton /user/hive/warehouse
diff --git hcatalog/src/test/e2e/templeton/deployers/env.sh hcatalog/src/test/e2e/templeton/deployers/env.sh
new file mode 100755
index 0000000..1517045
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/env.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#set -x;
+
+# define necessary env vars here and source it in other files
+
+export HADOOP_VERSION=2.4.1-SNAPSHOT
+export HIVE_VERSION=0.14.0-SNAPSHOT
+export PIG_VERSION=0.12.0
+
+#Root of project source tree
+export PROJ_HOME=/Users/${USER}/dev/hive
+export HIVE_HOME=${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin/apache-hive-${HIVE_VERSION}-bin
+export HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
+#export SQOOP_HOME=/
+
+#Make sure Pig is built for the Hadoop version you are running
+export PIG_TAR_PATH=/Users/${USER}/dev/pig-0.12.0-src/build
+#this is part of Pig distribution
+export PIG_PIGGYBANK_PATH=/Users/${USER}/dev/pig-0.12.0-src/build/tar/pig-0.12.0/contrib/piggybank/java/piggybank.jar
+
+export WEBHCAT_LOG_DIR=/tmp/webhcat_e2e/logs
+export WEBHCAT_PID_DIR=${WEBHCAT_LOG_DIR}
+#config/hive/hive-site.xml should match this path - it doesn't understand env vars
+export METASTORE_DB=${WEBHCAT_LOG_DIR}/webhcat_e2e_metastore_db
diff --git hcatalog/src/test/e2e/templeton/deployers/restart_hive_redeploy_artifacts.sh hcatalog/src/test/e2e/templeton/deployers/restart_hive_redeploy_artifacts.sh
new file mode 100755
index 0000000..2b46a85
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/restart_hive_redeploy_artifacts.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+
+#This is a convenience complete-lifecycle script useful for developers.
+
+./stop_hive_services.sh
+./clean_file_system.sh;
+./deploy_e2e_artifacts.sh;
+./start_hive_services.sh;
diff --git hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
new file mode 100755
index 0000000..0ead10a
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#This script copies precanned *-site.xml files needed to start Hive and WebHCat services, then
+#starts the services
+
+
+source ./env.sh
+
+#decide which DB to run against
+cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml ${HIVE_HOME}/conf/hive-site.xml
+#cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml ${HIVE_HOME}/conf/hive-site.xml
+
+cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml ${HIVE_HOME}/hcatalog/etc/webhcat/webhcat-site.xml
+
+if [ -d ${WEBHCAT_LOG_DIR} ]; then
+ rm -Rf ${WEBHCAT_LOG_DIR};
+fi
+mkdir -p ${WEBHCAT_LOG_DIR};
+echo "Starting Metastore..."
+nohup ${HIVE_HOME}/bin/hive --service metastore -p9933 >>${WEBHCAT_LOG_DIR}/metastore_console.log 2>>${WEBHCAT_LOG_DIR}/metastore_error.log &
+echo $! > ${WEBHCAT_LOG_DIR}/metastore.pid
+echo "Starting WebHCat..."
+${HIVE_HOME}/hcatalog/sbin/webhcat_server.sh start
+
+jps;
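+
+# Optional sanity check (assumes the default templeton.port of 50111): once
+# startup completes, WebHCat should answer its status endpoint, e.g.
+#   curl -s http://localhost:50111/templeton/v1/status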
diff --git hcatalog/src/test/e2e/templeton/deployers/stop_hive_services.sh hcatalog/src/test/e2e/templeton/deployers/stop_hive_services.sh
new file mode 100755
index 0000000..a26c557
--- /dev/null
+++ hcatalog/src/test/e2e/templeton/deployers/stop_hive_services.sh
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#This script stops the Hive and WebHCat services started by start_hive_services.sh
+
+
+source ./env.sh
+
+echo "Stopping Metastore...";
+kill `cat ${WEBHCAT_LOG_DIR}/metastore.pid`;
+echo "Stopping WebHCat...";
+${HIVE_HOME}/hcatalog/sbin/webhcat_server.sh stop;
+
+rm ${HIVE_HOME}/conf/hive-site.xml
+rm ${HIVE_HOME}/hcatalog/etc/webhcat/webhcat-site.xml
diff --git hcatalog/webhcat/svr/pom.xml hcatalog/webhcat/svr/pom.xml
index 640369e..9d25363 100644
--- hcatalog/webhcat/svr/pom.xml
+++ hcatalog/webhcat/svr/pom.xml
@@ -141,6 +141,12 @@
+      <resources>
+        <resource>
+          <targetPath>.</targetPath>
+          <directory>src/main/config</directory>
+        </resource>
+      </resources>
org.apache.maven.plugins
diff --git hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
index 6e45ee2..f0ce3a3 100644
--- hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
+++ hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
@@ -38,8 +38,9 @@ function real_script_name() {
}
function usage() {
- echo "usage: $0 [start|stop|foreground]"
+ echo "usage: $0 [start|startDebug|stop|foreground]"
echo " start Start the Webhcat Server"
+ echo " startDebug Start the Webhcat Server listening for debugger on port 5005"
echo " stop Stop the Webhcat Server"
echo " foreground Run the Webhcat Server in the foreground"
exit 1
@@ -225,6 +226,10 @@ case $cmd in
start)
start_webhcat
;;
+ startDebug)
+ export HADOOP_OPTS="${HADOOP_OPTS} -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005"
+ start_webhcat
+ ;;
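+  # Usage note (assumes the JDK's jdb is on the PATH): with the JVM listening
+  # on port 5005 as configured above, a debugger can attach with, e.g.
+  #   jdb -connect com.sun.jdi.SocketAttach:hostname=localhost,port=5005
+  # or an IDE remote-debug configuration pointing at the same port.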
stop)
stop_webhcat
;;