diff --git bin/ext/metatool.sh bin/ext/metatool.sh
new file mode 100755
index 0000000..64011bc
--- /dev/null
+++ bin/ext/metatool.sh
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=metatool
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
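+# Adding this service to SERVICE_LIST is what lets the hive launcher
+# dispatch `hive --service metatool` to the metatool() function below.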
+
+metatool () {
+
+ CLASS=org.apache.hadoop.hive.metastore.tools.HiveMetaTool
+ execHiveCmd $CLASS "$@"
+}
+
+metatool_help () {
+ CLASS=org.apache.hadoop.hive.metastore.tools.HiveMetaTool
+ execHiveCmd $CLASS "--help"
+}
diff --git bin/metatool bin/metatool
new file mode 100755
index 0000000..df85300
--- /dev/null
+++ bin/metatool
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
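+# Thin wrapper: forward all arguments to the metatool service, e.g.
+#   metatool -listFSRoot
+#   metatool -executeJDOQL "select locationUri from org.apache.hadoop.hive.metastore.model.MDatabase"
+#   metatool -updateLocation <new-loc> <old-loc> -dryRun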
+hive --service metatool "$@"
diff --git build.xml build.xml
index 6712af9..1d378bc 100644
--- build.xml
+++ build.xml
@@ -419,6 +419,7 @@
diff --git eclipse-templates/TestHiveMetaTool.launchtemplate eclipse-templates/TestHiveMetaTool.launchtemplate
new file mode 100644
index 0000000..f6429af
--- /dev/null
+++ eclipse-templates/TestHiveMetaTool.launchtemplate
@@ -0,0 +1,43 @@
diff --git metastore/ivy.xml metastore/ivy.xml
index 3011d2f..1e42550 100644
--- metastore/ivy.xml
+++ metastore/ivy.xml
@@ -45,7 +45,7 @@
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 045b550..eab675a 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -20,12 +20,15 @@ package org.apache.hadoop.hive.metastore;
import static org.apache.commons.lang.StringUtils.join;
+import java.net.URI;
+import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@@ -3974,6 +3977,287 @@ public class ObjectStore implements RawStore, Configurable {
return join(storedVals,',');
}
+ /** The following API
+ *
+ * - executeJDOQLSelect
+ *
+ * is used by HiveMetaTool. This API should not be exposed via Thrift.
+ *
+ */
+ @SuppressWarnings("finally")
+ public Collection<?> executeJDOQLSelect(String query) {
+
+ boolean committed = false;
+
+ LOG.info("HiveMetaTool:Executing query: " + query);
+ Collection<?> result = null;
+
+ try {
+ openTransaction();
+ Query q = pm.newQuery(query);
+ result = (Collection<?>) q.execute();
+ committed = commitTransaction();
+
+ } finally {
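+ // Returning from the finally block intentionally swallows any exception
+ // thrown in the try block; callers detect failure via the null return.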
+ if (!committed) {
+ rollbackTransaction();
+ return null;
+ } else {
+ return result;
+ }
+ }
+
+ }
+
+ /** The following API
+ *
+ * - executeJDOQLUpdate
+ *
+ * is used by HiveMetaTool. This API should not be exposed via Thrift.
+ *
+ */
+
+ @SuppressWarnings("finally")
+ public long executeJDOQLUpdate(String query) {
+
+ boolean committed = false;
+ long numUpdated = 0;
+
+ LOG.info("HiveMetaTool:Executing query: " + query);
+
+ try {
+ openTransaction();
+ Query q = pm.newQuery(query);
+ numUpdated = (Long) q.execute();
+ committed = commitTransaction();
+
+ } finally {
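+ // As above, returning from finally swallows exceptions; -1 signals failure.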
+ if (!committed) {
+ rollbackTransaction();
+ return -1;
+ } else {
+ return numUpdated;
+ }
+ }
+
+ }
+
+ /** The following API
+ *
+ * - listFSRoots
+ *
+ * is used by HiveMetaTool. This API should not be exposed via Thrift.
+ *
+ */
+
+ @SuppressWarnings("finally")
+ public Set<String> listFSRoots() {
+
+ boolean committed = false;
+ Set<String> fsRoots = new HashSet<String>();
+
+ try {
+
+ openTransaction();
+ Query query = pm.newQuery(MDatabase.class);
+
+ List<MDatabase> mDBs = (List<MDatabase>) query.execute();
+ pm.retrieveAll(mDBs);
+
+ for (MDatabase mDB:mDBs) {
+ fsRoots.add(mDB.getLocationUri());
+ }
+
+ committed = commitTransaction();
+
+ } finally {
+ if (!committed) {
+ rollbackTransaction();
+ return null;
+ } else {
+ return fsRoots;
+ }
+ }
+ }
+
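+ /**
+ * Matches an on-disk location URI against a user-supplied URI: the schemes,
+ * hosts, and (when the input specifies one) ports must agree, ignoring case.
+ */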
+ private boolean updateRecord(URI onDiskUri, URI inputUri) {
+
+ String onDiskHost = onDiskUri.getHost();
+ String inputHost = inputUri.getHost();
+
+ int onDiskPort = onDiskUri.getPort();
+ int inputPort = inputUri.getPort();
+
+ String onDiskScheme = onDiskUri.getScheme();
+ String inputScheme = inputUri.getScheme();
+
+ if (inputPort != -1) {
+ if (inputPort != onDiskPort) {
+ return false;
+ }
+ }
+
+ if (inputScheme != null) {
+ if (onDiskScheme == null) {
+ return false;
+ }
+ if (!inputScheme.equalsIgnoreCase(onDiskScheme)) {
+ return false;
+ }
+ }
+
+ if (onDiskHost != null) {
+
+ if (!inputHost.equalsIgnoreCase(onDiskHost)) {
+ return false;
+ }
+ } else {
+ return false;
+ }
+
+ return true;
+ }
+
+ /** The following API
+ *
+ * - updateFSRootLocation
+ *
+ * is used by HiveMetaTool. This API should not be exposed via Thrift.
+ *
+ */
+
+ @SuppressWarnings("finally")
+ public Map<String, String> updateFSRootLocation(URI oldLoc, URI newLoc, boolean dryRun) {
+
+ boolean committed = false;
+
+ LOG.info("HiveMetaTool:Old FS root location: " + oldLoc.toString() +
+ " New FS root location: " + newLoc.toString());
+ LOG.info("HiveMetaTool:Updating FS root location...");
+
+ int count = 0;
+ int totalCount = 0;
+
+ Map<String, String> updateLocations = new LinkedHashMap<String, String>();
+
+ try {
+
+ openTransaction();
+
+ // update locationURI in mDatabase
+ Query query = pm.newQuery(MDatabase.class);
+
+ List<MDatabase> mDBs = (List<MDatabase>) query.execute();
+ pm.retrieveAll(mDBs);
+
+ LOG.info("HiveMetaTool:Looking for location in DB_LOCATION_URI field in DBS table...");
+
+ for(MDatabase mDB:mDBs) {
+
+ URI locationURI = new URI(mDB.getLocationUri());
+
+ if (updateRecord(locationURI, oldLoc)) {
+
+ String dbLoc = mDB.getLocationUri().replaceAll(oldLoc.toString(), newLoc.toString());
+ if (dryRun) {
+ updateLocations.put(locationURI.toString(), dbLoc);
+ } else {
+ mDB.setLocationUri(dbLoc);
+ }
+ count++;
+ }
+ }
+
+ LOG.info("HiveMetaTool:Found " + count + " records to update");
+
+ // upgrade location in mStorageDescriptor
+ query = pm.newQuery(MStorageDescriptor.class);
+
+ List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute();
+ pm.retrieveAll(mSDSs);
+
+ LOG.info("HiveMetaTool:Looking for location in LOCATION field in SDS table...");
+
+ totalCount += count;
+ count = 0;
+
+ for(MStorageDescriptor mSDS:mSDSs) {
+
+ URI locationURI = new URI(mSDS.getLocation());
+
+ if (updateRecord(locationURI, oldLoc)) {
+ String tblLoc = mSDS.getLocation().replaceAll(oldLoc.toString(), newLoc.toString());
+ if (dryRun) {
+ updateLocations.put(locationURI.toString(), tblLoc);
+ } else {
+ mSDS.setLocation(tblLoc);
+ }
+ count++;
+ }
+ }
+
+ LOG.info("HiveMetaTool:Found " + count + " records to update");
+
+ // upgrade schema.url for avro serde
+ query = pm.newQuery(MSerDeInfo.class);
+
+ List<MSerDeInfo> mSerdes = (List<MSerDeInfo>) query.execute();
+ pm.retrieveAll(mSerdes);
+
+ LOG.info("HiveMetaTool:Looking for location in the value field of schema.url " +
+ "key in SERDES table...");
+
+ totalCount += count;
+ count = 0;
+
+ for(MSerDeInfo mSerde:mSerdes) {
+ String key = "schema.url";
+ String schemaLoc = mSerde.getParameters().get(key);
+
+ if (schemaLoc != null) {
+
+ URI schemaLocURI = new URI(schemaLoc);
+ if (updateRecord(schemaLocURI, oldLoc)) {
+ String newSchemaLoc = schemaLoc.replaceAll(oldLoc.toString(), newLoc.toString());
+ if (dryRun) {
+ updateLocations.put(schemaLocURI.toString(), newSchemaLoc);
+ } else {
+ mSerde.getParameters().put(key, newSchemaLoc);
+ }
+ count++;
+ }
+ }
+ }
+
+ LOG.info("HiveMetaTool:Found " + count + " records to update");
+ totalCount += count;
+
+ committed = commitTransaction();
+ } catch (URISyntaxException e) {
+ LOG.error("HiveMetaTool:Encountered error while validating location URI: "
+ + e.getLocalizedMessage());
+ } finally {
+ if (!committed) {
+ rollbackTransaction();
+ if (!dryRun) {
+ LOG.info("HiveMetaTool:Failed to update the FS root location");
+ } else {
+ LOG.info("HiveMetaTool: dryRun failed");
+ }
+ } else {
+ if (!dryRun && totalCount > 0) {
+ LOG.info("HiveMetaTool:Successfully updated FS root location");
+ LOG.info("HiveMetaTool:Total number of enteries updated: " + totalCount);
+ }
+ }
+ if (dryRun) {
+ return updateLocations;
+ } else {
+ return null;
+ }
+ }
+ }
+
@Override
public long cleanupEvents() {
boolean commited = false;
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
new file mode 100644
index 0000000..a414e3b
--- /dev/null
+++ metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
@@ -0,0 +1,220 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.tools;
+
+import java.net.URI;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+
+/**
+ * This class provides Hive admins a tool to
+ * - execute JDOQL against the metastore using DataNucleus
+ * - perform HA name node upgrade
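+ *
+ * Example invocation (arguments illustrative):
+ *   hive --service metatool -updateLocation <new-loc> <old-loc> -dryRun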
+ */
+
+public class HiveMetaTool {
+
+ private static final Log LOG = LogFactory.getLog(HiveMetaTool.class.getName());
+ private final Options cmdLineOptions = new Options();
+
+ public HiveMetaTool() {
+ }
+
+ @SuppressWarnings("static-access")
+ private void init() {
+
+ System.out.println("Initializing HiveMetaTool..");
+
+ Option help = new Option("help", "print this message");
+ Option listFSRoot = new Option("listFSRoot", "print the current FS root locations");
+ Option executeJDOQL =
+ OptionBuilder.withArgName("query-string")
+ .hasArgs()
+ .withDescription("execute the given JDOQL query")
+ .create("executeJDOQL");
+
+
+ Option updateFSRootLoc =
+ OptionBuilder
+ .withArgName("new-loc> " + "<old-loc>" + " <--dryRun")
+ .hasArgs(3)
+ .withDescription(
+ "update FS root location in the metastore to new location. Both new-loc and" +
+ " old-loc should be valid URIs with valid host names and schemes. " +
+ "when run with the dryRun option changes are displayed but are not persisted.")
+ .create("updateLocation");
+
+ cmdLineOptions.addOption(help);
+ cmdLineOptions.addOption(listFSRoot);
+ cmdLineOptions.addOption(executeJDOQL);
+ cmdLineOptions.addOption(updateFSRootLoc);
+
+ }
+
+ public static void printUpdateLocations(Map<String, String> updateLocations) {
+
+ for (String key: updateLocations.keySet()) {
+ String value = updateLocations.get(key);
+ System.out.println("HiveMetaTool:current location: " + key + " new location: " + value);
+ }
+ }
+
+ public static void main(String[] args) {
+
+ HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
+
+ HiveMetaTool metaTool = new HiveMetaTool();
+ ObjectStore objStore = null;
+ metaTool.init();
+
+ CommandLineParser parser = new GnuParser();
+ CommandLine line = null;
+
+ try {
+
+ line = parser.parse(metaTool.cmdLineOptions, args);
+ } catch (ParseException e) {
+ System.err.println("HiveMetaTool:Parsing failed. Reason: " + e.getLocalizedMessage());
+ // bail out early; falling through would dereference the null CommandLine below
+ return;
+ }
+
+ if (line.hasOption("help")) {
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("metatool", metaTool.cmdLineOptions);
+ } else if (line.hasOption("listFSRoot")) {
+ objStore = new ObjectStore();
+ objStore.setConf(hiveConf);
+ Set<String> hdfsRoots = objStore.listFSRoots();
+
+ if (hdfsRoots != null) {
+ System.out.println("HiveMetaTool:Listing FS Roots..");
+ for (String s : hdfsRoots) {
+ System.out.println(s);
+ }
+ } else {
+ System.err.println("HiveMetaTool:Encountered error during listFSRoot");
+ }
+
+ } else if (line.hasOption("executeJDOQL")) {
+ String query = line.getOptionValue("executeJDOQL");
+
+ objStore = new ObjectStore();
+ objStore.setConf(hiveConf);
+
+ if (query.toLowerCase().trim().startsWith("select")) {
+ Collection<?> result = objStore.executeJDOQLSelect(query);
+ if (result != null) {
+ Iterator<?> iter = result.iterator();
+ while (iter.hasNext()) {
+ Object o = iter.next();
+ System.out.println(o.toString());
+ }
+ } else {
+ System.err.println("HiveMetaTool:Encountered error during executeJDOQL");
+ }
+ } else if (query.toLowerCase().trim().startsWith("update")) {
+ long numUpdated = objStore.executeJDOQLUpdate(query);
+ if (numUpdated >= 0) {
+ System.out.println("HiveMetaTool:Number of records updated: " + numUpdated);
+ } else {
+ System.err.println("HiveMetaTool:Encountered error during executeJDOQL");
+ }
+ } else {
+ System.err.println("HiveMetaTool:Unsupported statement type");
+ }
+
+ } else if (line.hasOption("updateLocation")) {
+ String[] loc = line.getOptionValues("updateLocation");
+ boolean dryRun = false;
+ Map<String, String> updateLocations;
+
+ if (loc.length != 2 && loc.length != 3) {
+ System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 " +
+ "optional arguments but " +
+ "was passed " + loc.length + " arguments");
+ // bail out; indexing into loc below would be unsafe otherwise
+ return;
+ }
+
+ Path newPath = new Path(loc[0]);
+ Path oldPath = new Path(loc[1]);
+
+ URI oldURI = oldPath.toUri();
+ URI newURI = newPath.toUri();
+
+ if (loc.length == 3) {
+ if (loc[2].equals("dryRun") ||
+ loc[2].equals("-dryRun") ||
+ loc[2].equals("--dryRun")) {
+ dryRun = true;
+ }
+ }
+
+ /*
+ * validate input - if the old uri contains a valid port, the new uri should contain
+ * a valid port as well. Both new and old uri should contain valid host names and
+ * valid schemes.
+ */
+ if (oldURI.getHost() == null ||
+ newURI.getHost() == null) {
+ System.err.println("HiveMetaTool:A valid host is required in both old-loc and new-loc");
+ } else if (oldURI.getPort() > 0 && newURI.getPort() < 0) {
+ System.err.println("HiveMetaTool:old-loc has a valid port, new-loc should " +
+ "also contain a valid port");
+ } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
+ System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
+ } else {
+ objStore = new ObjectStore();
+ objStore.setConf(hiveConf);
+ if (dryRun) {
+ updateLocations = objStore.updateFSRootLocation(oldURI, newURI, dryRun);
+ printUpdateLocations(updateLocations);
+ } else {
+ objStore.updateFSRootLocation(oldURI, newURI, dryRun);
+ }
+ }
+ } else {
+ System.err.print("HiveMetaTool:Invalid option:");
+ for (String s : line.getArgs()) {
+ System.err.print(s + " ");
+ }
+ System.err.println();
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("metatool", metaTool.cmdLineOptions);
+ }
+
+ if (objStore != null) {
+ System.out.println("HiveMetaTool shutdown..");
+ objStore.shutdown();
+ }
+ }
+}
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
new file mode 100644
index 0000000..adb5a71
--- /dev/null
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
@@ -0,0 +1,241 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.metastore.tools.HiveMetaTool;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.util.StringUtils;
+
+public class TestHiveMetaTool extends TestCase {
+
+ private HiveMetaStoreClient client;
+
+ private PrintStream originalOut;
+ private OutputStream os;
+ private PrintStream ps;
+ private String locationUri;
+
+
+ private void dropDatabase(String dbName) throws Exception {
+
+ try {
+ client.dropDatabase(dbName);
+ } catch (NoSuchObjectException e) {
+ // the database does not exist yet; nothing to drop
+ } catch (InvalidOperationException e) {
+ // ignore; the test only needs a clean slate
+ }
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+
+ super.setUp();
+
+ try {
+
+ HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
+ client = new HiveMetaStoreClient(hiveConf, null);
+
+ // set up an in-memory stream to capture the tool's console output
+ os = new ByteArrayOutputStream();
+ ps = new PrintStream(os);
+
+ // create a dummy database and a couple of dummy tables
+ String dbName = "testDB";
+ String typeName = "Person";
+ String tblName = "simpleTbl";
+
+ Database db = new Database();
+ db.setName(dbName);
+ client.dropTable(dbName, tblName);
+ dropDatabase(dbName);
+ client.createDatabase(db);
+ locationUri = db.getLocationUri();
+
+ client.dropType(typeName);
+ Type typ1 = new Type();
+ typ1.setName(typeName);
+ typ1.setFields(new ArrayList<FieldSchema>(2));
+ typ1.getFields().add(
+ new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+ typ1.getFields().add(
+ new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
+ client.createType(typ1);
+
+ Table tbl = new Table();
+ tbl.setDbName(dbName);
+ tbl.setTableName(tblName);
+ StorageDescriptor sd = new StorageDescriptor();
+ tbl.setSd(sd);
+ sd.setCols(typ1.getFields());
+ sd.setCompressed(false);
+ sd.setNumBuckets(1);
+ sd.setParameters(new HashMap<String, String>());
+ sd.getParameters().put("test_param_1", "Use this for comments etc");
+ sd.setBucketCols(new ArrayList<String>(2));
+ sd.getBucketCols().add("name");
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(tbl.getTableName());
+ sd.getSerdeInfo().setParameters(new HashMap<String, String>());
+ sd.getSerdeInfo().getParameters().put(
+ org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
+ sd.getSerdeInfo().setSerializationLib(
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+ tbl.setPartitionKeys(new ArrayList<FieldSchema>());
+
+ client.createTable(tbl);
+ client.close();
+
+ } catch (Throwable e) {
+ System.err.println("Unable to setup the hive metatool test");
+ System.err.println(StringUtils.stringifyException(e));
+ throw new Exception(e);
+ }
+ }
+
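+ // redirect System.out into the in-memory buffer so tests can assert on
+ // the tool's console output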
+ private void redirectOutputStream() {
+
+ originalOut = System.out;
+ System.setOut(ps);
+
+ }
+
+ private void restoreOutputStream() {
+
+ System.setOut(originalOut);
+ }
+
+ public void testListFSRoot() {
+
+ redirectOutputStream();
+ String[] args = new String[1];
+ args[0] = "-listFSRoot";
+
+ try {
+ HiveMetaTool.main(args);
+ String out = os.toString();
+ boolean b = out.contains(locationUri);
+ assertTrue(b);
+ } catch (Exception e) {
+ System.err.println("Exception during testlistFSRoot");
+ System.err.println(StringUtils.stringifyException(e));
+ e.printStackTrace();
+ } finally {
+ restoreOutputStream();
+ System.out.println("Completed testListFSRoot");
+ }
+ }
+
+ public void testExecuteJDOQL() {
+
+ redirectOutputStream();
+ String[] args = new String[2];
+ args[0] = "-executeJDOQL";
+ args[1] = "select locationUri from org.apache.hadoop.hive.metastore.model.MDatabase";
+
+ try {
+ HiveMetaTool.main(args);
+ String out = os.toString();
+ boolean b = out.contains(locationUri);
+ assertTrue(b);
+ } catch (Exception e) {
+ System.err.println("Exception during testExecuteJDOQL");
+ System.err.println(StringUtils.stringifyException(e));
+ e.printStackTrace();
+ } finally {
+ restoreOutputStream();
+ System.out.println("Completed testExecuteJDOQL");
+ }
+ }
+
+ public void testUpdateFSRootLocation() {
+
+ redirectOutputStream();
+ String newLocationUri = "hdfs://nn-ha-uri/user/hive/warehouse";
+ String[] args = new String[3];
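+ // note: -updateLocation takes the new location first, then the old one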
+ args[0] = "-updateLocation";
+ args[1] = newLocationUri;
+ args[2] = locationUri;
+
+ String[] args2 = new String[1];
+ args2[0] = "-listFSRoot";
+
+ try {
+
+ // perform HA upgrade
+ HiveMetaTool.main(args);
+
+ // obtain new HDFS root
+ HiveMetaTool.main(args2);
+
+ String out = os.toString();
+ boolean b = out.contains(newLocationUri);
+
+ if (b) {
+ System.out.println("updateFSRootLocation successful");
+ // restore the original HDFS root
+ args[1] = locationUri;
+ args[2] = newLocationUri;
+ HiveMetaTool.main(args);
+ } else {
+ System.out.println("updateFSRootLocation failed");
+ }
+ } catch (Exception e) {
+ System.err.println("Exception during testUpdateFSRootLocation");
+ System.err.println(StringUtils.stringifyException(e));
+ e.printStackTrace();
+ } finally {
+ restoreOutputStream();
+ System.out.println("Completed testUpdateFSRootLocation..");
+ }
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ try {
+ super.tearDown();
+
+ } catch (Throwable e) {
+ System.err.println("Unable to close metastore");
+ System.err.println(StringUtils.stringifyException(e));
+ throw new Exception(e);
+ }
+ }
+}