diff --git a/bin/ext/metatool.sh b/bin/ext/metatool.sh index 20e1c01..c67fd2e 100644 --- a/bin/ext/metatool.sh +++ b/bin/ext/metatool.sh @@ -18,12 +18,12 @@ export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} " metatool () { HIVE_OPTS='' - CLASS=org.apache.hadoop.hive.metastore.tools.HiveMetaTool + CLASS=org.apache.hadoop.hive.metastore.tools.metatool.HiveMetaTool execHiveCmd $CLASS "$@" } metatool_help () { HIVE_OPTS='' - CLASS=org.apache.hadoop.hive.metastore.tools.HiveMetaTool + CLASS=org.apache.hadoop.hive.metastore.tools.metatool.HiveMetaTool execHiveCmd $CLASS "--help" } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java deleted file mode 100644 index 5bd83ac..0000000 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.metastore; - -import java.io.ByteArrayOutputStream; -import java.io.OutputStream; -import java.io.PrintStream; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; - -import junit.framework.TestCase; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.InvalidOperationException; -import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; -import org.apache.hadoop.hive.metastore.api.SerDeInfo; -import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import org.apache.hadoop.hive.metastore.api.Table; -import org.apache.hadoop.hive.metastore.api.Type; -import org.apache.hadoop.hive.metastore.tools.HiveMetaTool; -import org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat; -import org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat; -import org.apache.hadoop.hive.serde.serdeConstants; -import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; -import org.apache.hadoop.util.StringUtils; - -public class TestHiveMetaTool extends TestCase { - - private HiveMetaStoreClient client; - - private PrintStream originalOut; - private OutputStream os; - private PrintStream ps; - private String locationUri; - private final String dbName = "TestHiveMetaToolDB"; - private final String typeName = "Person"; - private final String tblName = "simpleTbl"; - private final String badTblName = "badSimpleTbl"; - - - private void dropDatabase(String dbName) throws Exception { - try { - client.dropDatabase(dbName); - } catch (NoSuchObjectException e) { - } catch (InvalidOperationException e) { - } catch (Exception e) { - throw e; - } - } - - @Override - 
protected void setUp() throws Exception { - super.setUp(); - - try { - HiveConf hiveConf = new HiveConf(HiveMetaTool.class); - client = new HiveMetaStoreClient(hiveConf); - - // Setup output stream to redirect output to - os = new ByteArrayOutputStream(); - ps = new PrintStream(os); - - // create a dummy database and a couple of dummy tables - Database db = new Database(); - db.setName(dbName); - client.dropTable(dbName, tblName); - client.dropTable(dbName, badTblName); - dropDatabase(dbName); - client.createDatabase(db); - locationUri = db.getLocationUri(); - String avroUri = "hdfs://nn.example.com/warehouse/hive/ab.avsc"; - String badAvroUri = new String("hdfs:/hive"); - - client.dropType(typeName); - Type typ1 = new Type(); - typ1.setName(typeName); - typ1.setFields(new ArrayList(2)); - typ1.getFields().add( - new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, "")); - typ1.getFields().add( - new FieldSchema("income", serdeConstants.INT_TYPE_NAME, "")); - client.createType(typ1); - - Table tbl = new Table(); - tbl.setDbName(dbName); - tbl.setTableName(tblName); - Map parameters = new HashMap<>(); - parameters.put(AvroSerdeUtils.SCHEMA_URL, avroUri); - tbl.setParameters(parameters); - StorageDescriptor sd = new StorageDescriptor(); - tbl.setSd(sd); - sd.setCols(typ1.getFields()); - sd.setCompressed(false); - sd.setNumBuckets(1); - sd.setParameters(new HashMap()); - sd.getParameters().put("test_param_1", "Use this for comments etc"); - sd.setBucketCols(new ArrayList(2)); - sd.getBucketCols().add("name"); - sd.setSerdeInfo(new SerDeInfo()); - sd.getSerdeInfo().setName(tbl.getTableName()); - sd.getSerdeInfo().setParameters(new HashMap()); - sd.getSerdeInfo().getParameters().put( - org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1"); - sd.getParameters().put(AvroSerdeUtils.SCHEMA_URL, avroUri); - sd.getSerdeInfo().setSerializationLib( - org.apache.hadoop.hive.serde2.avro.AvroSerDe.class.getName()); - sd.setInputFormat(AvroContainerInputFormat.class.getName()); - sd.setOutputFormat(AvroContainerOutputFormat.class.getName()); - tbl.setPartitionKeys(new ArrayList()); - client.createTable(tbl); - - //create a table with bad avro uri - tbl = new Table(); - tbl.setDbName(dbName); - tbl.setTableName(badTblName); - sd = new StorageDescriptor(); - tbl.setSd(sd); - sd.setCols(typ1.getFields()); - sd.setCompressed(false); - sd.setNumBuckets(1); - sd.setParameters(new HashMap()); - sd.getParameters().put("test_param_1", "Use this for comments etc"); - sd.setBucketCols(new ArrayList(2)); - sd.getBucketCols().add("name"); - sd.setSerdeInfo(new SerDeInfo()); - sd.getSerdeInfo().setName(tbl.getTableName()); - sd.getSerdeInfo().setParameters(new HashMap()); - sd.getSerdeInfo().getParameters().put( - org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1"); - sd.getParameters().put(AvroSerdeUtils.SCHEMA_URL, badAvroUri); - sd.getSerdeInfo().setSerializationLib( - org.apache.hadoop.hive.serde2.avro.AvroSerDe.class.getName()); - sd.setInputFormat(AvroContainerInputFormat.class.getName()); - sd.setOutputFormat(AvroContainerOutputFormat.class.getName()); - - tbl.setPartitionKeys(new ArrayList()); - client.createTable(tbl); - client.close(); - } catch (Exception e) { - System.err.println("Unable to setup the hive metatool test"); - System.err.println(StringUtils.stringifyException(e)); - throw new Exception(e); - } - } - - private void redirectOutputStream() { - - originalOut = System.out; - System.setOut(ps); - - } - - private void restoreOutputStream() { - - 
System.setOut(originalOut); - } - - public void testListFSRoot() throws Exception { - - redirectOutputStream(); - String[] args = new String[1]; - args[0] = new String("-listFSRoot"); - - try { - HiveMetaTool.main(args); - String out = os.toString(); - boolean b = out.contains(locationUri); - assertTrue(b); - } finally { - restoreOutputStream(); - System.out.println("Completed testListFSRoot"); - } - } - - public void testExecuteJDOQL() throws Exception { - - redirectOutputStream(); - String[] args = new String[2]; - args[0] = new String("-executeJDOQL"); - args[1] = new String("select locationUri from org.apache.hadoop.hive.metastore.model.MDatabase"); - - try { - HiveMetaTool.main(args); - String out = os.toString(); - boolean b = out.contains(locationUri); - assertTrue(b); - } finally { - restoreOutputStream(); - System.out.println("Completed testExecuteJDOQL"); - } - } - - public void testUpdateFSRootLocation() throws Exception { - redirectOutputStream(); - String oldLocationUri = "hdfs://nn.example.com/"; - String newLocationUri = "hdfs://nn-ha-uri/"; - String oldSchemaUri = "hdfs://nn.example.com/warehouse/hive/ab.avsc"; - String newSchemaUri = "hdfs://nn-ha-uri/warehouse/hive/ab.avsc"; - - String[] args = new String[5]; - args[0] = new String("-updateLocation"); - args[1] = new String(newLocationUri); - args[2] = new String(oldLocationUri); - args[3] = new String("-tablePropKey"); - args[4] = new String("avro.schema.url"); - - try { - checkAvroSchemaURLProps(client.getTable(dbName, tblName), oldSchemaUri); - - // perform HA upgrade - HiveMetaTool.main(args); - String out = os.toString(); - boolean b = out.contains(newLocationUri); - restoreOutputStream(); - assertTrue(b); - checkAvroSchemaURLProps(client.getTable(dbName,tblName), newSchemaUri); - - //restore the original HDFS root - args[1] = new String(oldLocationUri); - args[2] = new String(newLocationUri); - redirectOutputStream(); - HiveMetaTool.main(args); - checkAvroSchemaURLProps(client.getTable(dbName,tblName), oldSchemaUri); - restoreOutputStream(); - } finally { - restoreOutputStream(); - System.out.println("Completed testUpdateFSRootLocation.."); - } - } - - private void checkAvroSchemaURLProps(Table table, String expectedURL) { - assertEquals(expectedURL, table.getParameters().get(AvroSerdeUtils.SCHEMA_URL)); - assertEquals(expectedURL, table.getSd().getParameters().get(AvroSerdeUtils.SCHEMA_URL)); - } - - @Override - protected void tearDown() throws Exception { - try { - client.dropTable(dbName, tblName); - client.dropTable(dbName, badTblName); - dropDatabase(dbName); - super.tearDown(); - client.close(); - } catch (Throwable e) { - System.err.println("Unable to close metastore"); - System.err.println(StringUtils.stringifyException(e)); - throw new Exception(e); - } - } -} diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java new file mode 100644 index 0000000..ba344d4 --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.tools.metatool;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TException;
+
+/** Integration tests for the HiveMetaTool program. */
+public class TestHiveMetaTool extends TestCase {
+  private static final String DB_NAME = "TestHiveMetaToolDB";
+  private static final String TABLE_NAME = "simpleTbl";
+  private static final String LOCATION = "hdfs://nn.example.com/";
+  private static final String NEW_LOCATION = "hdfs://nn-ha-uri/";
+  private static final String PATH = "warehouse/hive/ab.avsc";
+  private static final String AVRO_URI = LOCATION + PATH;
+  private static final String NEW_AVRO_URI = NEW_LOCATION + PATH;
+
+  private HiveMetaStoreClient client;
+  private OutputStream os;
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+
+    try {
+      os = new ByteArrayOutputStream();
+      System.setOut(new PrintStream(os));
+
+      HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
+      client = new HiveMetaStoreClient(hiveConf);
+
+      createDatabase();
+      createTable();
+
+      client.close();
+    } catch (Exception e) {
+      System.err.println("Unable to setup the hive metatool test");
+      System.err.println(StringUtils.stringifyException(e));
+      throw new Exception(e);
+    }
+  }
+
+  private void createDatabase() throws Exception {
+    if (client.getAllDatabases().contains(DB_NAME)) {
+      client.dropDatabase(DB_NAME);
+    }
+
+    Database db = new Database();
+    db.setName(DB_NAME);
+    client.createDatabase(db);
+  }
+
+  private void createTable() throws Exception {
+    Table tbl = new Table();
+    tbl.setDbName(DB_NAME);
+    tbl.setTableName(TABLE_NAME);
+
+    Map<String, String> parameters = new HashMap<>();
+    parameters.put(AvroTableProperties.SCHEMA_URL.getPropName(), AVRO_URI);
+    tbl.setParameters(parameters);
+
+    List<FieldSchema> fields = new ArrayList<>(2);
+    fields.add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("income", serdeConstants.INT_TYPE_NAME, ""));
+
+    StorageDescriptor sd = new StorageDescriptor();
+    sd.setCols(fields);
+    sd.setParameters(new HashMap<String, String>());
+    sd.setSerdeInfo(new SerDeInfo());
+    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
sd.getParameters().put(AvroTableProperties.SCHEMA_URL.getPropName(), AVRO_URI); + tbl.setSd(sd); + + client.createTable(tbl); + } + + public void testListFSRoot() throws Exception { + HiveMetaTool.main(new String[] {"-listFSRoot"}); + String out = os.toString(); + assertTrue(out + " doesn't contain " + client.getDatabase(DB_NAME).getLocationUri(), + out.contains(client.getDatabase(DB_NAME).getLocationUri())); + } + + public void testExecuteJDOQL() throws Exception { + HiveMetaTool.main( + new String[] {"-executeJDOQL", "select locationUri from org.apache.hadoop.hive.metastore.model.MDatabase"}); + String out = os.toString(); + assertTrue(out + " doesn't contain " + client.getDatabase(DB_NAME).getLocationUri(), + out.contains(client.getDatabase(DB_NAME).getLocationUri())); + } + + public void testUpdateFSRootLocation() throws Exception { + checkAvroSchemaURLProps(AVRO_URI); + + HiveMetaTool.main(new String[] {"-updateLocation", NEW_LOCATION, LOCATION, "-tablePropKey", "avro.schema.url"}); + checkAvroSchemaURLProps(NEW_AVRO_URI); + + HiveMetaTool.main(new String[] {"-updateLocation", LOCATION, NEW_LOCATION, "-tablePropKey", "avro.schema.url"}); + checkAvroSchemaURLProps(AVRO_URI); + } + + private void checkAvroSchemaURLProps(String expectedUri) throws TException { + Table table = client.getTable(DB_NAME, TABLE_NAME); + assertEquals(expectedUri, table.getParameters().get(AvroTableProperties.SCHEMA_URL.getPropName())); + assertEquals(expectedUri, table.getSd().getParameters().get(AvroTableProperties.SCHEMA_URL.getPropName())); + } + + @Override + protected void tearDown() throws Exception { + try { + client.dropTable(DB_NAME, TABLE_NAME); + client.dropDatabase(DB_NAME); + super.tearDown(); + client.close(); + } catch (Throwable e) { + System.err.println("Unable to close metastore"); + System.err.println(StringUtils.stringifyException(e)); + throw new Exception(e); + } + } +} diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java new file mode 100644 index 0000000..1ddb66e --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Integration tests for the HiveMetaTool program. 
*/ +package org.apache.hadoop.hive.metastore.tools.metatool; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnExIm.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnExIm.java index 06e3ef2..624bc01 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnExIm.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnExIm.java @@ -19,7 +19,6 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.apache.hadoop.hive.metastore.tools.HiveMetaTool; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java deleted file mode 100644 index a50c0a3..0000000 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java +++ /dev/null @@ -1,490 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.metastore.tools; - -import java.net.URI; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.GnuParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionBuilder; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.metastore.ObjectStore; - -/** - * This class provides Hive admins a tool to - * - execute JDOQL against the metastore using DataNucleus - * - perform HA name node upgrade - */ - -public class HiveMetaTool { - - private static final Logger LOG = LoggerFactory.getLogger(HiveMetaTool.class.getName()); - private final Options cmdLineOptions = new Options(); - private ObjectStore objStore; - private boolean isObjStoreInitialized; - - public HiveMetaTool() { - this.isObjStoreInitialized = false; - } - - @SuppressWarnings("static-access") - private void init() { - - System.out.println("Initializing HiveMetaTool.."); - - Option help = new Option("help", "print this message"); - Option listFSRoot = new Option("listFSRoot", "print the current FS root locations"); - Option executeJDOQL = - OptionBuilder.withArgName("query-string") - .hasArgs() - .withDescription("execute the given JDOQL query") - .create("executeJDOQL"); - - /* Ideally we want to specify the different arguments to updateLocation as separate argNames. - * However if we did that, HelpFormatter swallows all but the last argument. Note that this is - * a know issue with the HelpFormatter class that has not been fixed. We specify all arguments - * with a single argName to workaround this HelpFormatter bug. 
- */ - Option updateFSRootLoc = - OptionBuilder - .withArgName("new-loc> " + " hdfsRoots = objStore.listFSRoots(); - if (hdfsRoots != null) { - System.out.println("Listing FS Roots.."); - for (String s : hdfsRoots) { - System.out.println(s); - } - } else { - System.err.println("Encountered error during listFSRoot - " + - "commit of JDO transaction failed"); - } - } - - private void executeJDOQLSelect(String query) { - Configuration conf = MetastoreConf.newMetastoreConf(); - initObjectStore(conf); - - System.out.println("Executing query: " + query); - try (ObjectStore.QueryWrapper queryWrapper = new ObjectStore.QueryWrapper()) { - Collection result = objStore.executeJDOQLSelect(query, queryWrapper); - if (result != null) { - Iterator iter = result.iterator(); - while (iter.hasNext()) { - Object o = iter.next(); - System.out.println(o.toString()); - } - } else { - System.err.println("Encountered error during executeJDOQLSelect -" + - "commit of JDO transaction failed."); - } - } - } - - private void executeJDOQLUpdate(String query) { - Configuration conf = MetastoreConf.newMetastoreConf(); - initObjectStore(conf); - - System.out.println("Executing query: " + query); - long numUpdated = objStore.executeJDOQLUpdate(query); - if (numUpdated >= 0) { - System.out.println("Number of records updated: " + numUpdated); - } else { - System.err.println("Encountered error during executeJDOQL -" + - "commit of JDO transaction failed."); - } - } - - private int printUpdateLocations(Map updateLocations) { - int count = 0; - for (String key: updateLocations.keySet()) { - String value = updateLocations.get(key); - System.out.println("old location: " + key + " new location: " + value); - count++; - } - return count; - } - - private void printTblURIUpdateSummary(ObjectStore.UpdateMStorageDescriptorTblURIRetVal retVal, - boolean isDryRun) { - String tblName = "SDS"; - String fieldName = "LOCATION"; - - if (retVal == null) { - System.err.println("Encountered error while executing updateMStorageDescriptorTblURI - " + - "commit of JDO transaction failed. Failed to update FSRoot locations in " + - fieldName + "field in " + tblName + " table."); - } else { - Map updateLocations = retVal.getUpdateLocations(); - if (isDryRun) { - System.out.println("Dry Run of updateLocation on table " + tblName + ".."); - } else { - System.out.println("Successfully updated the following locations.."); - } - int count = printUpdateLocations(updateLocations); - if (isDryRun) { - System.out.println("Found " + count + " records in " + tblName + " table to update"); - } else { - System.out.println("Updated " + count + " records in " + tblName + " table"); - } - List badRecords = retVal.getBadRecords(); - if (badRecords.size() > 0) { - System.err.println("Warning: Found records with bad " + fieldName + " in " + - tblName + " table.. "); - for (String badRecord:badRecords) { - System.err.println("bad location URI: " + badRecord); - } - } - int numNullRecords = retVal.getNumNullRecords(); - if (numNullRecords != 0) { - LOG.debug("Number of NULL location URI: " + numNullRecords + - ". This can happen for View or Index."); - } - } - } - - private void printDatabaseURIUpdateSummary(ObjectStore.UpdateMDatabaseURIRetVal retVal, - boolean isDryRun) { - String tblName = "DBS"; - String fieldName = "LOCATION_URI"; - - if (retVal == null) { - System.err.println("Encountered error while executing updateMDatabaseURI - " + - "commit of JDO transaction failed. 
Failed to update FSRoot locations in " + - fieldName + "field in " + tblName + " table."); - } else { - Map updateLocations = retVal.getUpdateLocations(); - if (isDryRun) { - System.out.println("Dry Run of updateLocation on table " + tblName + ".."); - } else { - System.out.println("Successfully updated the following locations.."); - } - int count = printUpdateLocations(updateLocations); - if (isDryRun) { - System.out.println("Found " + count + " records in " + tblName + " table to update"); - } else { - System.out.println("Updated " + count + " records in " + tblName + " table"); - } - List badRecords = retVal.getBadRecords(); - if (badRecords.size() > 0) { - System.err.println("Warning: Found records with bad " + fieldName + " in " + - tblName + " table.. "); - for (String badRecord:badRecords) { - System.err.println("bad location URI: " + badRecord); - } - } - } - } - - private void printPropURIUpdateSummary(ObjectStore.UpdatePropURIRetVal retVal, String - tablePropKey, boolean isDryRun, String tblName, String methodName) { - if (retVal == null) { - System.err.println("Encountered error while executing " + methodName + " - " + - "commit of JDO transaction failed. Failed to update FSRoot locations in " + - "value field corresponding to" + tablePropKey + " in " + tblName + " table."); - } else { - Map updateLocations = retVal.getUpdateLocations(); - if (isDryRun) { - System.out.println("Dry Run of updateLocation on table " + tblName + ".."); - } else { - System.out.println("Successfully updated the following locations.."); - } - int count = printUpdateLocations(updateLocations); - if (isDryRun) { - System.out.println("Found " + count + " records in " + tblName + " table to update"); - } else { - System.out.println("Updated " + count + " records in " + tblName + " table"); - } - List badRecords = retVal.getBadRecords(); - if (badRecords.size() > 0) { - System.err.println("Warning: Found records with bad " + tablePropKey + " key in " + - tblName + " table.. "); - for (String badRecord:badRecords) { - System.err.println("bad location URI: " + badRecord); - } - } - } - } - - private void printSerdePropURIUpdateSummary(ObjectStore.UpdateSerdeURIRetVal retVal, - String serdePropKey, boolean isDryRun) { - String tblName = "SERDE_PARAMS"; - - if (retVal == null) { - System.err.println("Encountered error while executing updateSerdeURI - " + - "commit of JDO transaction failed. Failed to update FSRoot locations in " + - "value field corresponding to " + serdePropKey + " in " + tblName + " table."); - } else { - Map updateLocations = retVal.getUpdateLocations(); - if (isDryRun) { - System.out.println("Dry Run of updateLocation on table " + tblName + ".."); - } else { - System.out.println("Successfully updated the following locations.."); - } - int count = printUpdateLocations(updateLocations); - if (isDryRun) { - System.out.println("Found " + count + " records in " + tblName + " table to update"); - } else { - System.out.println("Updated " + count + " records in " + tblName + " table"); - } - List badRecords = retVal.getBadRecords(); - if (badRecords.size() > 0) { - System.err.println("Warning: Found records with bad " + serdePropKey + " key in " + - tblName + " table.. 
"); - for (String badRecord:badRecords) { - System.err.println("bad location URI: " + badRecord); - } - } - } - } - - public void updateFSRootLocation(URI oldURI, URI newURI, String serdePropKey, String - tablePropKey, boolean isDryRun) { - Configuration conf = MetastoreConf.newMetastoreConf(); - initObjectStore(conf); - - System.out.println("Looking for LOCATION_URI field in DBS table to update.."); - ObjectStore.UpdateMDatabaseURIRetVal updateMDBURIRetVal = objStore.updateMDatabaseURI(oldURI, - newURI, isDryRun); - printDatabaseURIUpdateSummary(updateMDBURIRetVal, isDryRun); - - System.out.println("Looking for LOCATION field in SDS table to update.."); - ObjectStore.UpdateMStorageDescriptorTblURIRetVal updateTblURIRetVal = - objStore.updateMStorageDescriptorTblURI(oldURI, newURI, isDryRun); - printTblURIUpdateSummary(updateTblURIRetVal, isDryRun); - - if (tablePropKey != null) { - System.out.println("Looking for value of " + tablePropKey + " key in TABLE_PARAMS table " + - "to update.."); - ObjectStore.UpdatePropURIRetVal updateTblPropURIRetVal = - objStore.updateTblPropURI(oldURI, newURI, - tablePropKey, isDryRun); - printPropURIUpdateSummary(updateTblPropURIRetVal, tablePropKey, isDryRun, "TABLE_PARAMS", - "updateTblPropURI"); - - System.out.println("Looking for value of " + tablePropKey + " key in SD_PARAMS table " + - "to update.."); - ObjectStore.UpdatePropURIRetVal updatePropURIRetVal = objStore - .updateMStorageDescriptorTblPropURI(oldURI, newURI, tablePropKey, isDryRun); - printPropURIUpdateSummary(updatePropURIRetVal, tablePropKey, isDryRun, "SD_PARAMS", - "updateMStorageDescriptorTblPropURI"); - } - - if (serdePropKey != null) { - System.out.println("Looking for value of " + serdePropKey + " key in SERDE_PARAMS table " + - "to update.."); - ObjectStore.UpdateSerdeURIRetVal updateSerdeURIretVal = objStore.updateSerdeURI(oldURI, - newURI, serdePropKey, isDryRun); - printSerdePropURIUpdateSummary(updateSerdeURIretVal, serdePropKey, isDryRun); - } - } - private static void printAndExit(HiveMetaTool metaTool) { - HelpFormatter formatter = new HelpFormatter(); - formatter.printHelp("metatool", metaTool.cmdLineOptions); - System.exit(1); - } - - public static void main(String[] args) { - HiveMetaTool metaTool = new HiveMetaTool(); - metaTool.init(); - CommandLineParser parser = new GnuParser(); - CommandLine line = null; - - try { - try { - line = parser.parse(metaTool.cmdLineOptions, args); - } catch (ParseException e) { - System.err.println("HiveMetaTool:Parsing failed. 
Reason: " + e.getLocalizedMessage()); - printAndExit(metaTool); - } - - if (line.hasOption("help")) { - HelpFormatter formatter = new HelpFormatter(); - formatter.printHelp("metatool", metaTool.cmdLineOptions); - } else if (line.hasOption("listFSRoot")) { - if (line.hasOption("dryRun")) { - System.err.println("HiveMetaTool: dryRun is not valid with listFSRoot"); - printAndExit(metaTool); - } else if (line.hasOption("serdePropKey")) { - System.err.println("HiveMetaTool: serdePropKey is not valid with listFSRoot"); - printAndExit(metaTool); - } else if (line.hasOption("tablePropKey")) { - System.err.println("HiveMetaTool: tablePropKey is not valid with listFSRoot"); - printAndExit(metaTool); - } - metaTool.listFSRoot(); - } else if (line.hasOption("executeJDOQL")) { - String query = line.getOptionValue("executeJDOQL"); - if (line.hasOption("dryRun")) { - System.err.println("HiveMetaTool: dryRun is not valid with executeJDOQL"); - printAndExit(metaTool); - } else if (line.hasOption("serdePropKey")) { - System.err.println("HiveMetaTool: serdePropKey is not valid with executeJDOQL"); - printAndExit(metaTool); - } else if (line.hasOption("tablePropKey")) { - System.err.println("HiveMetaTool: tablePropKey is not valid with executeJDOQL"); - printAndExit(metaTool); - } - if (query.toLowerCase().trim().startsWith("select")) { - metaTool.executeJDOQLSelect(query); - } else if (query.toLowerCase().trim().startsWith("update")) { - metaTool.executeJDOQLUpdate(query); - } else { - System.err.println("HiveMetaTool:Unsupported statement type"); - printAndExit(metaTool); - } - } else if (line.hasOption("updateLocation")) { - String[] loc = line.getOptionValues("updateLocation"); - boolean isDryRun = false; - String serdepropKey = null; - String tablePropKey = null; - - if (loc.length != 2 && loc.length != 3) { - System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 " + - "optional arguments but " + - "was passed " + loc.length + " arguments"); - printAndExit(metaTool); - } - - Path newPath = new Path(loc[0]); - Path oldPath = new Path(loc[1]); - - URI oldURI = oldPath.toUri(); - URI newURI = newPath.toUri(); - - if (line.hasOption("dryRun")) { - isDryRun = true; - } - - if (line.hasOption("serdePropKey")) { - serdepropKey = line.getOptionValue("serdePropKey"); - } - - if (line.hasOption("tablePropKey")) { - tablePropKey = line.getOptionValue("tablePropKey"); - } - - /* - * validate input - Both new and old URI should contain valid host names and valid schemes. - * port is optional in both the URIs since HDFS HA NN URI doesn't have a port. - */ - if (oldURI.getHost() == null || newURI.getHost() == null) { - System.err.println("HiveMetaTool:A valid host is required in both old-loc and new-loc"); - } else if (oldURI.getScheme() == null || newURI.getScheme() == null) { - System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc"); - } else { - metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, tablePropKey, isDryRun); - } - } else { - if (line.hasOption("dryRun")) { - System.err.println("HiveMetaTool: dryRun is not a valid standalone option"); - } else if (line.hasOption("serdePropKey")) { - System.err.println("HiveMetaTool: serdePropKey is not a valid standalone option"); - } else if (line.hasOption("tablePropKey")) { - System.err.println("HiveMetaTool: tablePropKey is not a valid standalone option"); - printAndExit(metaTool); - } else { - System.err.print("HiveMetaTool:Parsing failed. 
Reason: Invalid arguments: " ); - for (String s : line.getArgs()) { - System.err.print(s + " "); - } - System.err.println(); - } - printAndExit(metaTool); - } - } finally { - metaTool.shutdownObjectStore(); - } - } -} \ No newline at end of file diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/HiveMetaTool.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/HiveMetaTool.java new file mode 100644 index 0000000..760d78d --- /dev/null +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/HiveMetaTool.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore.tools.metatool; + +import org.apache.hadoop.hive.metastore.ObjectStore; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class provides Hive admins a tool. 
The following can be done with it:
+ * - list the file system root
+ * - execute JDOQL against the metastore using DataNucleus
+ * - perform HA name node upgrade
+ */
+public final class HiveMetaTool {
+  private static final Logger LOGGER = LoggerFactory.getLogger(HiveMetaTool.class.getName());
+
+  private HiveMetaTool() {
+    throw new UnsupportedOperationException("HiveMetaTool should not be instantiated");
+  }
+
+  public static void main(String[] args) {
+    HiveMetaToolCommandLine cl = HiveMetaToolCommandLine.parseArguments(args);
+
+    ObjectStore objectStore = new ObjectStore();
+    objectStore.setConf(MetastoreConf.newMetastoreConf());
+
+    MetaToolTask task = null;
+    try {
+      if (cl.isListFSRoot()) {
+        task = new MetaToolTaskListFSRoot();
+      } else if (cl.isExecuteJDOQL()) {
+        task = new MetaToolTaskExecuteJDOQLQuery();
+      } else if (cl.isUpdateLocation()) {
+        task = new MetaToolTaskUpdateLocation();
+      } else {
+        throw new IllegalArgumentException("No task was specified!");
+      }
+
+      task.setObjectStore(objectStore);
+      task.setCommandLine(cl);
+      task.execute();
+    } catch (Exception e) {
+      LOGGER.error("Exception occurred", e);
+    } finally {
+      objectStore.shutdown();
+    }
+  }
+}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/HiveMetaToolCommandLine.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/HiveMetaToolCommandLine.java
new file mode 100644
index 0000000..1223f0d
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/HiveMetaToolCommandLine.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.hadoop.hive.metastore.tools.metatool; + +import java.util.Arrays; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.OptionBuilder; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jline.TerminalFactory; + +class HiveMetaToolCommandLine { + private static final Logger LOGGER = LoggerFactory.getLogger(HiveMetaToolCommandLine.class.getName()); + + @SuppressWarnings("static-access") + private static final Option LIST_FS_ROOT = OptionBuilder + .withDescription("print the current FS root locations") + .create("listFSRoot"); + + @SuppressWarnings("static-access") + private static final Option EXECUTE_JDOQL = OptionBuilder + .withArgName("query-string") + .hasArgs() + .withDescription("execute the given JDOQL query") + .create("executeJDOQL"); + + @SuppressWarnings("static-access") + private static final Option UPDATE_LOCATION = OptionBuilder + .withArgName("new-loc> " + " result = getObjectStore().executeJDOQLSelect(query, queryWrapper); + if (result != null) { + Iterator iter = result.iterator(); + while (iter.hasNext()) { + Object o = iter.next(); + System.out.println(o.toString()); + } + } else { + System.err.println("Encountered error during executeJDOQLSelect"); + } + } + } + + private void executeJDOQLUpdate(String query) { + System.out.println("Executing query: " + query); + long numUpdated = getObjectStore().executeJDOQLUpdate(query); + if (numUpdated >= 0) { + System.out.println("Number of records updated: " + numUpdated); + } else { + System.err.println("Encountered error during executeJDOQL - commit of JDO transaction failed."); + } + } +} diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/MetaToolTaskListFSRoot.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/MetaToolTaskListFSRoot.java new file mode 100644 index 0000000..2d9234b --- /dev/null +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/MetaToolTaskListFSRoot.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.hadoop.hive.metastore.tools.metatool;
+
+import java.util.Set;
+
+class MetaToolTaskListFSRoot extends MetaToolTask {
+  @Override
+  void execute() {
+    Set<String> hdfsRoots = getObjectStore().listFSRoots();
+    if (hdfsRoots != null) {
+      System.out.println("Listing FS Roots..");
+      for (String s : hdfsRoots) {
+        System.out.println(s);
+      }
+    } else {
+      System.err.println("Encountered error during listFSRoot");
+    }
+  }
+}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/MetaToolTaskUpdateLocation.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/MetaToolTaskUpdateLocation.java
new file mode 100644
index 0000000..299d94b
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/MetaToolTaskUpdateLocation.java
@@ -0,0 +1,159 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.tools.metatool;
+
+import java.net.URI;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+class MetaToolTaskUpdateLocation extends MetaToolTask {
+  private static final Logger LOGGER = LoggerFactory.getLogger(MetaToolTaskUpdateLocation.class.getName());
+
+  @Override
+  void execute() {
+    String[] loc = getCl().getUpddateLocationParams();
+
+    Path newPath = new Path(loc[0]);
+    Path oldPath = new Path(loc[1]);
+
+    URI oldURI = oldPath.toUri();
+    URI newURI = newPath.toUri();
+
+    /*
+     * validate input - Both new and old URI should contain valid host names and valid schemes.
+     * port is optional in both the URIs since HDFS HA NN URI doesn't have a port.
+     */
+    if (oldURI.getHost() == null || newURI.getHost() == null) {
+      throw new IllegalStateException("HiveMetaTool:A valid host is required in both old-loc and new-loc");
+    } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
+      throw new IllegalStateException("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
+    }
+
+    updateFSRootLocation(oldURI, newURI, getCl().getSerdePropKey(), getCl().getTablePropKey(), getCl().isDryRun());
+  }
+
+  private void updateFSRootLocation(URI oldURI, URI newURI, String serdePropKey, String tablePropKey,
+      boolean isDryRun) {
+    updateMDatabaseURI(oldURI, newURI, isDryRun);
+    updateMStorageDescriptorTblURI(oldURI, newURI, isDryRun);
+    updateTablePropURI(oldURI, newURI, tablePropKey, isDryRun);
+    updateSerdeURI(oldURI, newURI, serdePropKey, isDryRun);
+  }
+
+  private void updateMDatabaseURI(URI oldURI, URI newURI, boolean isDryRun) {
+    System.out.println("Looking for LOCATION_URI field in DBS table to update..");
+    ObjectStore.UpdateMDatabaseURIRetVal retVal = getObjectStore().updateMDatabaseURI(oldURI, newURI, isDryRun);
+    if (retVal == null) {
+      System.err.println("Encountered error while executing updateMDatabaseURI - commit of JDO transaction failed. "
+          + "Failed to update FSRoot locations in LOCATION_URI field in DBS table.");
+    } else {
+      printUpdateLocations(retVal.getUpdateLocations(), isDryRun, "DBS");
+      printBadRecords(retVal.getBadRecords(), "DBS", "LOCATION_URI");
+    }
+  }
+
+  private void updateMStorageDescriptorTblURI(URI oldURI, URI newURI, boolean isDryRun) {
+    System.out.println("Looking for LOCATION field in SDS table to update..");
+    ObjectStore.UpdateMStorageDescriptorTblURIRetVal retVal =
+        getObjectStore().updateMStorageDescriptorTblURI(oldURI, newURI, isDryRun);
+    if (retVal == null) {
+      System.err.println("Encountered error while executing updateMStorageDescriptorTblURI - commit of JDO "
+          + "transaction failed. Failed to update FSRoot locations in LOCATION field in SDS table.");
+    } else {
+      printUpdateLocations(retVal.getUpdateLocations(), isDryRun, "SDS");
+      printBadRecords(retVal.getBadRecords(), "SDS", "LOCATION");
+
+      int numNullRecords = retVal.getNumNullRecords();
+      if (numNullRecords != 0) {
+        LOGGER.debug("Number of NULL location URI: " + numNullRecords + ". This can happen for View or Index.");
+      }
+    }
+  }
+
+  private void updateTablePropURI(URI oldURI, URI newURI, String tablePropKey, boolean isDryRun) {
+    if (tablePropKey != null) {
+      System.out.println("Looking for value of " + tablePropKey + " key in TABLE_PARAMS table to update..");
+      ObjectStore.UpdatePropURIRetVal updateTblPropURIRetVal =
+          getObjectStore().updateTblPropURI(oldURI, newURI, tablePropKey, isDryRun);
+      printPropURIUpdateSummary(updateTblPropURIRetVal, tablePropKey, isDryRun, "TABLE_PARAMS", "updateTblPropURI");
+
+      System.out.println("Looking for value of " + tablePropKey + " key in SD_PARAMS table to update..");
+      ObjectStore.UpdatePropURIRetVal updatePropURIRetVal =
+          getObjectStore().updateMStorageDescriptorTblPropURI(oldURI, newURI, tablePropKey, isDryRun);
+      printPropURIUpdateSummary(updatePropURIRetVal, tablePropKey, isDryRun, "SD_PARAMS",
+          "updateMStorageDescriptorTblPropURI");
+    }
+  }
+
+  private void printPropURIUpdateSummary(ObjectStore.UpdatePropURIRetVal retVal, String tablePropKey, boolean isDryRun,
+      String tblName, String methodName) {
+    if (retVal == null) {
+      System.err.println("Encountered error while executing " + methodName + " - commit of JDO transaction failed. "
+          + "Failed to update FSRoot locations in value field corresponding to " + tablePropKey + " in " + tblName
+          + " table.");
+    } else {
+      printUpdateLocations(retVal.getUpdateLocations(), isDryRun, tblName);
+      printBadRecords(retVal.getBadRecords(), tblName, tablePropKey + " key");
+    }
+  }
+
+  private void updateSerdeURI(URI oldURI, URI newURI, String serdePropKey, boolean isDryRun) {
+    if (serdePropKey != null) {
+      System.out.println("Looking for value of " + serdePropKey + " key in SERDE_PARAMS table to update..");
+      ObjectStore.UpdateSerdeURIRetVal retVal =
+          getObjectStore().updateSerdeURI(oldURI, newURI, serdePropKey, isDryRun);
+      if (retVal == null) {
+        System.err.println("Encountered error while executing updateSerdeURI - commit of JDO transaction failed. "
+            + "Failed to update FSRoot locations in value field corresponding to " + serdePropKey + " in "
+            + "SERDE_PARAMS table.");
+      } else {
+        printUpdateLocations(retVal.getUpdateLocations(), isDryRun, "SERDE_PARAMS");
+        printBadRecords(retVal.getBadRecords(), "SERDE_PARAMS", serdePropKey + " key");
+      }
+    }
+  }
+
+  private void printUpdateLocations(Map<String, String> updateLocations, boolean isDryRun, String tableName) {
+    System.out.println(isDryRun ?
+        "Dry Run of updateLocation on table " + tableName + ".." :
+        "Successfully updated the following locations..");
+
+    for (Map.Entry<String, String> e : updateLocations.entrySet()) {
+      System.out.println("old location: " + e.getKey() + " new location: " + e.getValue());
+    }
+
+    System.out.println(isDryRun ?
+        "Found " + updateLocations.size() + " records in " + tableName + " table to update" :
+        "Updated " + updateLocations.size() + " records in " + tableName + " table");
+  }
+
+  private void printBadRecords(List<String> badRecords, String tableName, String field) {
+    if (!badRecords.isEmpty()) {
+      System.err.println("Warning: Found records with bad " + field + " in " + tableName + " table.. ");
+      for (String badRecord : badRecords) {
+        System.err.println("bad location URI: " + badRecord);
+      }
+    }
+  }
+}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
new file mode 100644
index 0000000..dd2aacd
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package contains the HiveMetaTool program and the classes used by it.
+ */
+package org.apache.hadoop.hive.metastore.tools.metatool;
+
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaToolCommandLine.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaToolCommandLine.java
new file mode 100644
index 0000000..9563bd6
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaToolCommandLine.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.tools.metatool;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+/** Unit tests for HiveMetaToolCommandLine.
*/ +@Category(MetastoreUnitTest.class) +public class TestHiveMetaToolCommandLine { + @Rule + public final ExpectedException exception = ExpectedException.none(); + + @Test + public void testParseListFSRoot() throws ParseException { + HiveMetaToolCommandLine cl = new HiveMetaToolCommandLine(new String[] {"-listFSRoot"}); + assertTrue(cl.isListFSRoot()); + assertFalse(cl.isExecuteJDOQL()); + assertNull(cl.getJDOQLQuery()); + assertFalse(cl.isUpdateLocation()); + assertNull(cl.getUpddateLocationParams()); + assertFalse(cl.isDryRun()); + assertNull(cl.getSerdePropKey()); + assertNull(cl.getTablePropKey()); + } + + @Test + public void testParseExecuteJDOQL() throws ParseException { + HiveMetaToolCommandLine cl = new HiveMetaToolCommandLine(new String[] {"-executeJDOQL", "select a from b"}); + assertFalse(cl.isListFSRoot()); + assertTrue(cl.isExecuteJDOQL()); + assertEquals("select a from b", cl.getJDOQLQuery()); + assertFalse(cl.isUpdateLocation()); + assertNull(cl.getUpddateLocationParams()); + assertFalse(cl.isDryRun()); + assertNull(cl.getSerdePropKey()); + assertNull(cl.getTablePropKey()); + } + + @Test + public void testParseUpdateLocation() throws ParseException { + String[] args = new String[] {"-updateLocation", "hdfs://new.loc", "hdfs://old.loc", "-dryRun", "-serdePropKey", + "abc", "-tablePropKey", "def"}; + HiveMetaToolCommandLine cl = new HiveMetaToolCommandLine(args); + assertFalse(cl.isListFSRoot()); + assertFalse(cl.isExecuteJDOQL()); + assertNull(cl.getJDOQLQuery()); + assertTrue(cl.isUpdateLocation()); + assertEquals("hdfs://new.loc", cl.getUpddateLocationParams()[0]); + assertEquals("hdfs://old.loc", cl.getUpddateLocationParams()[1]); + assertTrue(cl.isDryRun()); + assertEquals("abc", cl.getSerdePropKey()); + assertEquals("def", cl.getTablePropKey()); + } + + @Test + public void testNoTask() throws ParseException { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("exectly one of -listFSRoot, -executeJDOQL, -updateLocation must be set"); + + new HiveMetaToolCommandLine(new String[] {}); + } + + @Test + public void testMultipleTask() throws ParseException { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("exectly one of -listFSRoot, -executeJDOQL, -updateLocation must be set"); + + new HiveMetaToolCommandLine(new String[] {"-listFSRoot", "-executeJDOQL", "select a from b"}); + } + + @Test + public void testUpdateLocationOneArgument() throws ParseException { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("HiveMetaTool:updateLocation takes in 2 arguments but was passed 1 arguments"); + + new HiveMetaToolCommandLine(new String[] {"-updateLocation", "hdfs://abc.de"}); + } + + @Test + public void testDryRunNotAllowed() throws ParseException { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("-dryRun, -serdePropKey, -tablePropKey may be used only for the -updateLocation command"); + + new HiveMetaToolCommandLine(new String[] {"-listFSRoot", "-dryRun"}); + } + + @Test + public void testSerdePropKeyNotAllowed() throws ParseException { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("-dryRun, -serdePropKey, -tablePropKey may be used only for the -updateLocation command"); + + new HiveMetaToolCommandLine(new String[] {"-listFSRoot", "-serdePropKey", "abc"}); + } + + @Test + public void testTablePropKeyNotAllowed() throws ParseException { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("-dryRun, -serdePropKey, 
-tablePropKey may be used only for the -updateLocation command"); + + new HiveMetaToolCommandLine(new String[] {"-executeJDOQL", "select a from b", "-tablePropKey", "abc"}); + } +} diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskExecuteJDOQLQuery.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskExecuteJDOQLQuery.java new file mode 100644 index 0000000..7976a5e --- /dev/null +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskExecuteJDOQLQuery.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore.tools.metatool; + +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.when; + +import java.io.ByteArrayOutputStream; +import java.io.OutputStream; +import java.io.PrintStream; +import java.util.Arrays; + +import org.apache.hadoop.hive.metastore.ObjectStore; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.ExpectedException; +import org.mockito.Mockito; + +/** Unit tests for MetaToolTaskExecuteJDOQLQuery. 
+@Category(MetastoreUnitTest.class)
+public class TestMetaToolTaskExecuteJDOQLQuery {
+  @Rule
+  public final ExpectedException exception = ExpectedException.none();
+
+  private OutputStream os;
+
+  @Before
+  public void setup() {
+    os = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(os));
+    System.setErr(new PrintStream(os));
+  }
+
+  @Test
+  public void testSelectQuery() throws Exception {
+    String selectQuery = "select a from b";
+    String entry1 = "abc";
+    String entry2 = "def";
+
+    ObjectStore mockObjectStore = Mockito.mock(ObjectStore.class);
+    doReturn(Arrays.asList(entry1, entry2))
+        .when(mockObjectStore).executeJDOQLSelect(eq(selectQuery), any(ObjectStore.QueryWrapper.class));
+
+    MetaToolTaskExecuteJDOQLQuery t = new MetaToolTaskExecuteJDOQLQuery();
+    t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-executeJDOQL", selectQuery}));
+    t.setObjectStore(mockObjectStore);
+    t.execute();
+
+    assertTrue(os.toString() + " doesn't contain " + entry1, os.toString().contains(entry1));
+    assertTrue(os.toString() + " doesn't contain " + entry2, os.toString().contains(entry2));
+  }
+
+  @Test
+  public void testUpdateQuerySuccessful() throws Exception {
+    testUpdateQuery(1L, "Number of records updated: 1");
+  }
+
+  @Test
+  public void testUpdateQueryNotSuccessful() throws Exception {
+    testUpdateQuery(-1L, "Encountered error during executeJDOQL - commit of JDO transaction failed.");
+  }
+
+  private void testUpdateQuery(long ret, String expected) throws Exception {
+    String updateQuery = "update a set b = 'c'";
+
+    ObjectStore mockObjectStore = Mockito.mock(ObjectStore.class);
+    when(mockObjectStore.executeJDOQLUpdate(updateQuery)).thenReturn(ret);
+
+    MetaToolTaskExecuteJDOQLQuery t = new MetaToolTaskExecuteJDOQLQuery();
+    t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-executeJDOQL", updateQuery}));
+    t.setObjectStore(mockObjectStore);
+    t.execute();
+
+    assertTrue(os.toString() + " doesn't contain " + expected, os.toString().contains(expected));
+  }
+
+  @Test
+  public void testIllegalQuery() throws Exception {
+    exception.expect(IllegalArgumentException.class);
+    exception.expectMessage("HiveMetaTool:Unsupported statement type, only select and update supported");
+
+    String illegalQuery = "abcde";
+    MetaToolTaskExecuteJDOQLQuery t = new MetaToolTaskExecuteJDOQLQuery();
+    t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-executeJDOQL", illegalQuery}));
+    t.execute();
+  }
+}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskListFSRoot.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskListFSRoot.java
new file mode 100644
index 0000000..27c3aa5
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskListFSRoot.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.tools.metatool;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.when;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+import com.google.common.collect.Sets;
+
+/** Unit tests for MetaToolTaskListFSRoot. */
+@Category(MetastoreUnitTest.class)
+public class TestMetaToolTaskListFSRoot {
+  @Test
+  public void testListFSRoot() throws Exception {
+    String fsRoot1 = "hdfs://abc.de";
+    String fsRoot2 = "hdfs://fgh.ji";
+
+    ObjectStore mockObjectStore = Mockito.mock(ObjectStore.class);
+    when(mockObjectStore.listFSRoots()).thenReturn(Sets.newHashSet(fsRoot1, fsRoot2));
+
+    OutputStream os = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(os));
+
+    MetaToolTaskListFSRoot t = new MetaToolTaskListFSRoot();
+    t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-listFSRoot"}));
+    t.setObjectStore(mockObjectStore);
+    t.execute();
+
+    assertTrue(os.toString() + " doesn't contain " + fsRoot1, os.toString().contains(fsRoot1));
+    assertTrue(os.toString() + " doesn't contain " + fsRoot2, os.toString().contains(fsRoot2));
+  }
+}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskUpdateLocation.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskUpdateLocation.java
new file mode 100644
index 0000000..63868b5
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskUpdateLocation.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.tools.metatool;
+
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.when;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.net.URI;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
+import org.mockito.Mockito;
+
+/** Unit tests for MetaToolTaskUpdateLocation. */
+@Category(MetastoreUnitTest.class)
+public class TestMetaToolTaskUpdateLocation {
+  @Rule
+  public final ExpectedException exception = ExpectedException.none();
+
+  private OutputStream os;
+
+  @Before
+  public void setup() {
+    os = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(os));
+    System.setErr(new PrintStream(os));
+  }
+
+  @Test
+  public void testNoHost() throws Exception {
+    exception.expect(IllegalStateException.class);
+    exception.expectMessage("HiveMetaTool:A valid host is required in both old-loc and new-loc");
+
+    MetaToolTaskUpdateLocation t = new MetaToolTaskUpdateLocation();
+    t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-updateLocation", "hdfs://", "hdfs://"}));
+    t.execute();
+  }
+
+  @Test
+  public void testNoScheme() throws Exception {
+    exception.expect(IllegalStateException.class);
+    exception.expectMessage("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
+
+    MetaToolTaskUpdateLocation t = new MetaToolTaskUpdateLocation();
+    t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-updateLocation", "//old.host", "//new.host"}));
+    t.execute();
+  }
+
+  @Test
+  public void testUpdateLocationNoUpdate() throws Exception {
+    // testing only that the proper functions are called on ObjectStore - effect tested in TestHiveMetaTool in itests
+    String oldUriString = "hdfs://old.host";
+    String newUriString = "hdfs://new.host";
+    String tablePropKey = "abc";
+    String serdePropKey = "def";
+
+    URI oldUri = new Path(oldUriString).toUri();
+    URI newUri = new Path(newUriString).toUri();
+
+    ObjectStore mockObjectStore = Mockito.mock(ObjectStore.class);
+    when(mockObjectStore.updateMDatabaseURI(eq(oldUri), eq(newUri), eq(true))).thenReturn(null);
+    when(mockObjectStore.updateMStorageDescriptorTblURI(eq(oldUri), eq(newUri), eq(true))).thenReturn(null);
+    when(mockObjectStore.updateTblPropURI(eq(oldUri), eq(newUri), eq(tablePropKey), eq(true))).thenReturn(null);
+    when(mockObjectStore.updateMStorageDescriptorTblPropURI(eq(oldUri), eq(newUri), eq(tablePropKey), eq(true)))
+        .thenReturn(null);
+    when(mockObjectStore.updateSerdeURI(eq(oldUri), eq(newUri), eq(serdePropKey), eq(true))).thenReturn(null);
+
+    MetaToolTaskUpdateLocation t = new MetaToolTaskUpdateLocation();
+    t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-updateLocation", newUriString, oldUriString, "-dryRun",
+        "-tablePropKey", tablePropKey, "-serdePropKey", serdePropKey}));
+    t.setObjectStore(mockObjectStore);
+    t.execute();
+  }
+}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
new file mode 100644
index 0000000..d573757
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Unit tests for the HiveMetaTool program. */
+package org.apache.hadoop.hive.metastore.tools.metatool;
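
Usage note: the tests above pin down the metatool's command-line contract. As an illustrative sketch only (the flags and their argument order are taken verbatim from the test cases; the `hive --service metatool` entry point and the example hostnames are assumptions), the exercised invocations look like:

    # list the metastore's filesystem root
    hive --service metatool -listFSRoot

    # run a JDOQL select or update directly against the metastore backing database
    hive --service metatool -executeJDOQL "select a from b"

    # rewrite stored URIs from an old filesystem root to a new one; note that
    # the new location precedes the old one, and -dryRun, -serdePropKey and
    # -tablePropKey are accepted only together with -updateLocation
    hive --service metatool -updateLocation hdfs://new.loc hdfs://old.loc -dryRun -serdePropKey abc -tablePropKey def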