diff --git a/beeline/src/java/org/apache/hive/beeline/schematool/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/schematool/HiveSchemaTool.java
index c723476..223bda7 100644
--- a/beeline/src/java/org/apache/hive/beeline/schematool/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/schematool/HiveSchemaTool.java
@@ -25,10 +25,10 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaException;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;
-import org.apache.hadoop.hive.metastore.tools.MetastoreSchemaTool;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper.MetaStoreConnectionInfo;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper.NestedScriptParser;
+import org.apache.hadoop.hive.metastore.tools.schematool.MetastoreSchemaTool;
import org.apache.hive.beeline.BeeLine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/beeline/src/test/org/apache/hive/beeline/schematool/TestHiveSchemaTool.java b/beeline/src/test/org/apache/hive/beeline/schematool/TestHiveSchemaTool.java
index 8514dc8..2311b04 100644
--- a/beeline/src/test/org/apache/hive/beeline/schematool/TestHiveSchemaTool.java
+++ b/beeline/src/test/org/apache/hive/beeline/schematool/TestHiveSchemaTool.java
@@ -19,7 +19,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/TestSchemaToolCatalogOps.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/TestSchemaToolCatalogOps.java
deleted file mode 100644
index 43c0b96..0000000
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/TestSchemaToolCatalogOps.java
+++ /dev/null
@@ -1,486 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.text.StrTokenizer;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.Catalog;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.Function;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder;
-import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
-import org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder;
-import org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder;
-import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.thrift.TException;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
-import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
-import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
-
-public class TestSchemaToolCatalogOps {
- private static MetastoreSchemaTool schemaTool;
- private static HiveConf conf;
- private IMetaStoreClient client;
- private static String testMetastoreDB;
- private static PrintStream errStream;
- private static PrintStream outStream;
- private static String argsBase;
-
- @BeforeClass
- public static void initDb() throws HiveMetaException, IOException {
- conf = new HiveConf();
- MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.AUTO_CREATE_ALL, false);
- MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.HMS_HANDLER_ATTEMPTS, 1);
- MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.THRIFT_CONNECTION_RETRIES, 1);
- testMetastoreDB = System.getProperty("java.io.tmpdir") +
- File.separator + "testschematoolcatopsdb";
- MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECT_URL_KEY,
- "jdbc:derby:" + testMetastoreDB + ";create=true");
- schemaTool = new MetastoreSchemaTool();
- schemaTool.init(System.getProperty("test.tmp.dir", "target/tmp"),
- new String[]{"-dbType", "derby", "--info"}, null, conf);
-
- String userName = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.CONNECTION_USER_NAME);
- String passWord = MetastoreConf.getPassword(conf, MetastoreConf.ConfVars.PWD);
- schemaTool.setUserName(userName);
- schemaTool.setPassWord(passWord);
- errStream = System.err;
- outStream = System.out;
-
- argsBase = "-dbType derby -userName " + userName + " -passWord " + passWord + " ";
- execute(new SchemaToolTaskInit(), "-initSchema"); // Pre-install the database so all the tables are there.
- }
-
- @AfterClass
- public static void removeDb() throws Exception {
- File metaStoreDir = new File(testMetastoreDB);
- if (metaStoreDir.exists()) {
- FileUtils.forceDeleteOnExit(metaStoreDir);
- }
- System.setOut(outStream);
- System.setErr(errStream);
- }
-
- @Before
- public void createClient() throws MetaException {
- client = new HiveMetaStoreClient(conf);
- }
-
- @Test
- public void createCatalog() throws HiveMetaException, TException {
- String catName = "my_test_catalog";
- String location = "file:///tmp/my_test_catalog";
- String description = "very descriptive";
- String argsCreate = String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
- catName, location, description);
- execute(new SchemaToolTaskCreateCatalog(), argsCreate);
-
- Catalog cat = client.getCatalog(catName);
- Assert.assertEquals(location, cat.getLocationUri());
- Assert.assertEquals(description, cat.getDescription());
- }
-
- @Test(expected = HiveMetaException.class)
- public void createExistingCatalog() throws HiveMetaException {
- String catName = "hive";
- String location = "somewhere";
- String argsCreate = String.format("-createCatalog %s -catalogLocation \"%s\"",
- catName, location);
- execute(new SchemaToolTaskCreateCatalog(), argsCreate);
- }
-
- @Test
- public void createExistingCatalogWithIfNotExists() throws HiveMetaException {
- String catName = "my_existing_test_catalog";
- String location = "file:///tmp/my_test_catalog";
- String description = "very descriptive";
- String argsCreate1 = String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
- catName, location, description);
- execute(new SchemaToolTaskCreateCatalog(), argsCreate1);
-
- String argsCreate2 =
- String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\" -ifNotExists",
- catName, location, description);
- execute(new SchemaToolTaskCreateCatalog(), argsCreate2);
- }
-
- @Test
- public void alterCatalog() throws HiveMetaException, TException {
- String catName = "an_alterable_catalog";
- String location = "file:///tmp/an_alterable_catalog";
- String description = "description";
- String argsCreate = String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
- catName, location, description);
- execute(new SchemaToolTaskCreateCatalog(), argsCreate);
-
- location = "file:///tmp/somewhere_else";
- String argsAlter1 = String.format("-alterCatalog %s -catalogLocation \"%s\"",
- catName, location);
- execute(new SchemaToolTaskAlterCatalog(), argsAlter1);
- Catalog cat = client.getCatalog(catName);
- Assert.assertEquals(location, cat.getLocationUri());
- Assert.assertEquals(description, cat.getDescription());
-
- description = "a better description";
- String argsAlter2 = String.format("-alterCatalog %s -catalogDescription \"%s\"",
- catName, description);
- execute(new SchemaToolTaskAlterCatalog(), argsAlter2);
- cat = client.getCatalog(catName);
- Assert.assertEquals(location, cat.getLocationUri());
- Assert.assertEquals(description, cat.getDescription());
-
- location = "file:///tmp/a_third_location";
- description = "best description yet";
- String argsAlter3 = String.format("-alterCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
- catName, location, description);
- execute(new SchemaToolTaskAlterCatalog(), argsAlter3);
- cat = client.getCatalog(catName);
- Assert.assertEquals(location, cat.getLocationUri());
- Assert.assertEquals(description, cat.getDescription());
- }
-
- @Test(expected = HiveMetaException.class)
- public void alterBogusCatalog() throws HiveMetaException {
- String catName = "nosuch";
- String location = "file:///tmp/somewhere";
- String description = "whatever";
- String argsAlter = String.format("-alterCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
- catName, location, description);
- execute(new SchemaToolTaskAlterCatalog(), argsAlter);
- }
-
- @Test(expected = HiveMetaException.class)
- public void alterCatalogNoChange() throws HiveMetaException {
- String catName = "alter_cat_no_change";
- String location = "file:///tmp/alter_cat_no_change";
- String description = "description";
- String argsCreate = String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
- catName, location, description);
- execute(new SchemaToolTaskCreateCatalog(), argsCreate);
-
- String argsAlter = String.format("-alterCatalog %s", catName);
- execute(new SchemaToolTaskAlterCatalog(), argsAlter);
- }
-
- @Test
- public void moveDatabase() throws HiveMetaException, TException {
- String toCatName = "moveDbCat";
- String dbName = "moveDbDb";
- String tableName = "moveDbTable";
- String funcName = "movedbfunc";
- String partVal = "moveDbKey";
-
- new CatalogBuilder()
- .setName(toCatName)
- .setLocation("file:///tmp")
- .create(client);
-
- Database db = new DatabaseBuilder()
- .setCatalogName(DEFAULT_CATALOG_NAME)
- .setName(dbName)
- .create(client, conf);
-
- new FunctionBuilder()
- .inDb(db)
- .setName(funcName)
- .setClass("org.apache.hive.myudf")
- .create(client, conf);
-
- Table table = new TableBuilder()
- .inDb(db)
- .setTableName(tableName)
- .addCol("a", "int")
- .addPartCol("p", "string")
- .create(client, conf);
-
- new PartitionBuilder()
- .inTable(table)
- .addValue(partVal)
- .addToTable(client, conf);
-
- String argsMoveDB = String.format("-moveDatabase %s -fromCatalog %s -toCatalog %s", dbName,
- DEFAULT_CATALOG_NAME, toCatName);
- execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
-
- Database fetchedDb = client.getDatabase(toCatName, dbName);
- Assert.assertNotNull(fetchedDb);
- Assert.assertEquals(toCatName.toLowerCase(), fetchedDb.getCatalogName());
-
- Function fetchedFunction = client.getFunction(toCatName, dbName, funcName);
- Assert.assertNotNull(fetchedFunction);
- Assert.assertEquals(toCatName.toLowerCase(), fetchedFunction.getCatName());
- Assert.assertEquals(dbName.toLowerCase(), fetchedFunction.getDbName());
-
- Table fetchedTable = client.getTable(toCatName, dbName, tableName);
- Assert.assertNotNull(fetchedTable);
- Assert.assertEquals(toCatName.toLowerCase(), fetchedTable.getCatName());
- Assert.assertEquals(dbName.toLowerCase(), fetchedTable.getDbName());
-
- Partition fetchedPart =
- client.getPartition(toCatName, dbName, tableName, Collections.singletonList(partVal));
- Assert.assertNotNull(fetchedPart);
- Assert.assertEquals(toCatName.toLowerCase(), fetchedPart.getCatName());
- Assert.assertEquals(dbName.toLowerCase(), fetchedPart.getDbName());
- Assert.assertEquals(tableName.toLowerCase(), fetchedPart.getTableName());
- }
-
- @Test
- public void moveDatabaseWithExistingDbOfSameNameAlreadyInTargetCatalog()
- throws TException, HiveMetaException {
- String catName = "clobberCatalog";
- new CatalogBuilder()
- .setName(catName)
- .setLocation("file:///tmp")
- .create(client);
- try {
- String argsMoveDB = String.format("-moveDatabase %s -fromCatalog %s -toCatalog %s",
- DEFAULT_DATABASE_NAME, catName, DEFAULT_CATALOG_NAME);
- execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
- Assert.fail("Attempt to move default database should have failed.");
- } catch (HiveMetaException e) {
- // good
- }
-
- // Make sure nothing really moved
- Set<String> dbNames = new HashSet<>(client.getAllDatabases(DEFAULT_CATALOG_NAME));
- Assert.assertTrue(dbNames.contains(DEFAULT_DATABASE_NAME));
- }
-
- @Test(expected = HiveMetaException.class)
- public void moveNonExistentDatabase() throws TException, HiveMetaException {
- String catName = "moveNonExistentDb";
- new CatalogBuilder()
- .setName(catName)
- .setLocation("file:///tmp")
- .create(client);
- String argsMoveDB = String.format("-moveDatabase nosuch -fromCatalog %s -toCatalog %s",
- catName, DEFAULT_CATALOG_NAME);
- execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
- }
-
- @Test
- public void moveDbToNonExistentCatalog() throws TException, HiveMetaException {
- String dbName = "doomedToHomelessness";
- new DatabaseBuilder()
- .setName(dbName)
- .create(client, conf);
- try {
- String argsMoveDB = String.format("-moveDatabase %s -fromCatalog %s -toCatalog nosuch",
- dbName, DEFAULT_CATALOG_NAME);
- execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
- Assert.fail("Attempt to move database to non-existent catalog should have failed.");
- } catch (HiveMetaException e) {
- // good
- }
-
- // Make sure nothing really moved
- Set<String> dbNames = new HashSet<>(client.getAllDatabases(DEFAULT_CATALOG_NAME));
- Assert.assertTrue(dbNames.contains(dbName.toLowerCase()));
- }
-
- @Test
- public void moveTable() throws TException, HiveMetaException {
- String toCatName = "moveTableCat";
- String toDbName = "moveTableDb";
- String tableName = "moveTableTable";
- String partVal = "moveTableKey";
-
- new CatalogBuilder()
- .setName(toCatName)
- .setLocation("file:///tmp")
- .create(client);
-
- new DatabaseBuilder()
- .setCatalogName(toCatName)
- .setName(toDbName)
- .create(client, conf);
-
- Table table = new TableBuilder()
- .setTableName(tableName)
- .addCol("a", "int")
- .addPartCol("p", "string")
- .create(client, conf);
-
- new PartitionBuilder()
- .inTable(table)
- .addValue(partVal)
- .addToTable(client, conf);
-
- String argsMoveTable = String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
- tableName, DEFAULT_CATALOG_NAME, toCatName, DEFAULT_DATABASE_NAME, toDbName);
- execute(new SchemaToolTaskMoveTable(), argsMoveTable);
-
- Table fetchedTable = client.getTable(toCatName, toDbName, tableName);
- Assert.assertNotNull(fetchedTable);
- Assert.assertEquals(toCatName.toLowerCase(), fetchedTable.getCatName());
- Assert.assertEquals(toDbName.toLowerCase(), fetchedTable.getDbName());
-
- Partition fetchedPart =
- client.getPartition(toCatName, toDbName, tableName, Collections.singletonList(partVal));
- Assert.assertNotNull(fetchedPart);
- Assert.assertEquals(toCatName.toLowerCase(), fetchedPart.getCatName());
- Assert.assertEquals(toDbName.toLowerCase(), fetchedPart.getDbName());
- Assert.assertEquals(tableName.toLowerCase(), fetchedPart.getTableName());
- }
-
- @Test
- public void moveTableWithinCatalog() throws TException, HiveMetaException {
- String toDbName = "moveTableWithinCatalogDb";
- String tableName = "moveTableWithinCatalogTable";
- String partVal = "moveTableWithinCatalogKey";
-
- new DatabaseBuilder()
- .setName(toDbName)
- .create(client, conf);
-
- Table table = new TableBuilder()
- .setTableName(tableName)
- .addCol("a", "int")
- .addPartCol("p", "string")
- .create(client, conf);
-
- new PartitionBuilder()
- .inTable(table)
- .addValue(partVal)
- .addToTable(client, conf);
-
- String argsMoveTable = String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
- tableName, DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME, toDbName);
- execute(new SchemaToolTaskMoveTable(), argsMoveTable);
-
- Table fetchedTable = client.getTable(DEFAULT_CATALOG_NAME, toDbName, tableName);
- Assert.assertNotNull(fetchedTable);
- Assert.assertEquals(DEFAULT_CATALOG_NAME, fetchedTable.getCatName());
- Assert.assertEquals(toDbName.toLowerCase(), fetchedTable.getDbName());
-
- Partition fetchedPart =
- client.getPartition(DEFAULT_CATALOG_NAME, toDbName, tableName, Collections.singletonList(partVal));
- Assert.assertNotNull(fetchedPart);
- Assert.assertEquals(DEFAULT_CATALOG_NAME, fetchedPart.getCatName());
- Assert.assertEquals(toDbName.toLowerCase(), fetchedPart.getDbName());
- Assert.assertEquals(tableName.toLowerCase(), fetchedPart.getTableName());
- }
-
- @Test
- public void moveTableWithExistingTableOfSameNameAlreadyInTargetDatabase()
- throws TException, HiveMetaException {
- String toDbName = "clobberTableDb";
- String tableName = "clobberTableTable";
-
- Database toDb = new DatabaseBuilder()
- .setName(toDbName)
- .create(client, conf);
-
- new TableBuilder()
- .setTableName(tableName)
- .addCol("a", "int")
- .create(client, conf);
-
- new TableBuilder()
- .inDb(toDb)
- .setTableName(tableName)
- .addCol("b", "varchar(32)")
- .create(client, conf);
-
- try {
- String argsMoveTable =
- String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
- tableName, DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME, toDbName);
- execute(new SchemaToolTaskMoveTable(), argsMoveTable);
- Assert.fail("Attempt to move table should have failed.");
- } catch (HiveMetaException e) {
- // good
- }
-
- // Make sure nothing really moved
- Set<String> tableNames = new HashSet<>(client.getAllTables(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME));
- Assert.assertTrue(tableNames.contains(tableName.toLowerCase()));
-
- // Make sure the table in the target database didn't get clobbered
- Table fetchedTable = client.getTable(DEFAULT_CATALOG_NAME, toDbName, tableName);
- Assert.assertEquals("b", fetchedTable.getSd().getCols().get(0).getName());
- }
-
- @Test(expected = HiveMetaException.class)
- public void moveNonExistentTable() throws TException, HiveMetaException {
- String toDbName = "moveNonExistentTable";
- new DatabaseBuilder()
- .setName(toDbName)
- .create(client, conf);
- String argsMoveTable =
- String.format("-moveTable nosuch -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
- DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME, toDbName);
- execute(new SchemaToolTaskMoveTable(), argsMoveTable);
- }
-
- @Test
- public void moveTableToNonExistentDb() throws TException, HiveMetaException {
- String tableName = "doomedToWander";
- new TableBuilder()
- .setTableName(tableName)
- .addCol("a", "int")
- .create(client, conf);
-
- try {
- String argsMoveTable =
- String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase nosuch",
- tableName, DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
- execute(new SchemaToolTaskMoveTable(), argsMoveTable);
- Assert.fail("Attempt to move table to non-existent table should have failed.");
- } catch (HiveMetaException e) {
- // good
- }
-
- // Make sure nothing really moved
- Set<String> tableNames = new HashSet<>(client.getAllTables(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME));
- Assert.assertTrue(tableNames.contains(tableName.toLowerCase()));
- }
-
- private static void execute(SchemaToolTask task, String taskArgs) throws HiveMetaException {
- try {
- StrTokenizer tokenizer = new StrTokenizer(argsBase + taskArgs, ' ', '\"');
- SchemaToolCommandLine cl = new SchemaToolCommandLine(tokenizer.getTokenArray(), null);
- task.setCommandLineArguments(cl);
- } catch (Exception e) {
- throw new IllegalStateException("Could not parse comman line \n" + argsBase + taskArgs, e);
- }
-
- task.setHiveSchemaTool(schemaTool);
- task.execute();
- }
-}
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolCatalogOps.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolCatalogOps.java
new file mode 100644
index 0000000..9669cd4
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolCatalogOps.java
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.tools.schematool;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.text.StrTokenizer;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.thrift.TException;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
+
+public class TestSchemaToolCatalogOps {
+ private static MetastoreSchemaTool schemaTool;
+ private static HiveConf conf;
+ private IMetaStoreClient client;
+ private static String testMetastoreDB;
+ private static PrintStream errStream;
+ private static PrintStream outStream;
+ private static String argsBase;
+
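+ // One-time setup: point the metastore at an embedded Derby database and run -initSchema
+ // through schematool, so every catalog test below operates on a fully installed schema.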
+ @BeforeClass
+ public static void initDb() throws HiveMetaException, IOException {
+ conf = new HiveConf();
+ MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.AUTO_CREATE_ALL, false);
+ MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.HMS_HANDLER_ATTEMPTS, 1);
+ MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.THRIFT_CONNECTION_RETRIES, 1);
+ testMetastoreDB = System.getProperty("java.io.tmpdir") +
+ File.separator + "testschematoolcatopsdb";
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECT_URL_KEY,
+ "jdbc:derby:" + testMetastoreDB + ";create=true");
+ schemaTool = new MetastoreSchemaTool();
+ schemaTool.init(System.getProperty("test.tmp.dir", "target/tmp"),
+ new String[]{"-dbType", "derby", "--info"}, null, conf);
+
+ String userName = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.CONNECTION_USER_NAME);
+ String passWord = MetastoreConf.getPassword(conf, MetastoreConf.ConfVars.PWD);
+ schemaTool.setUserName(userName);
+ schemaTool.setPassWord(passWord);
+ errStream = System.err;
+ outStream = System.out;
+
+ argsBase = "-dbType derby -userName " + userName + " -passWord " + passWord + " ";
+ execute(new SchemaToolTaskInit(), "-initSchema"); // Pre-install the database so all the tables are there.
+ }
+
+ @AfterClass
+ public static void removeDb() throws Exception {
+ File metaStoreDir = new File(testMetastoreDB);
+ if (metaStoreDir.exists()) {
+ FileUtils.forceDeleteOnExit(metaStoreDir);
+ }
+ System.setOut(outStream);
+ System.setErr(errStream);
+ }
+
+ @Before
+ public void createClient() throws MetaException {
+ client = new HiveMetaStoreClient(conf);
+ }
+
+ @Test
+ public void createCatalog() throws HiveMetaException, TException {
+ String catName = "my_test_catalog";
+ String location = "file:///tmp/my_test_catalog";
+ String description = "very descriptive";
+ String argsCreate = String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
+ catName, location, description);
+ execute(new SchemaToolTaskCreateCatalog(), argsCreate);
+
+ Catalog cat = client.getCatalog(catName);
+ Assert.assertEquals(location, cat.getLocationUri());
+ Assert.assertEquals(description, cat.getDescription());
+ }
+
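+ // "hive" is the built-in default catalog, so creating it again without -ifNotExists must fail.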
+ @Test(expected = HiveMetaException.class)
+ public void createExistingCatalog() throws HiveMetaException {
+ String catName = "hive";
+ String location = "somewhere";
+ String argsCreate = String.format("-createCatalog %s -catalogLocation \"%s\"",
+ catName, location);
+ execute(new SchemaToolTaskCreateCatalog(), argsCreate);
+ }
+
+ @Test
+ public void createExistingCatalogWithIfNotExists() throws HiveMetaException {
+ String catName = "my_existing_test_catalog";
+ String location = "file:///tmp/my_test_catalog";
+ String description = "very descriptive";
+ String argsCreate1 = String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
+ catName, location, description);
+ execute(new SchemaToolTaskCreateCatalog(), argsCreate1);
+
+ String argsCreate2 =
+ String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\" -ifNotExists",
+ catName, location, description);
+ execute(new SchemaToolTaskCreateCatalog(), argsCreate2);
+ }
+
+ @Test
+ public void alterCatalog() throws HiveMetaException, TException {
+ String catName = "an_alterable_catalog";
+ String location = "file:///tmp/an_alterable_catalog";
+ String description = "description";
+ String argsCreate = String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
+ catName, location, description);
+ execute(new SchemaToolTaskCreateCatalog(), argsCreate);
+
+ location = "file:///tmp/somewhere_else";
+ String argsAlter1 = String.format("-alterCatalog %s -catalogLocation \"%s\"",
+ catName, location);
+ execute(new SchemaToolTaskAlterCatalog(), argsAlter1);
+ Catalog cat = client.getCatalog(catName);
+ Assert.assertEquals(location, cat.getLocationUri());
+ Assert.assertEquals(description, cat.getDescription());
+
+ description = "a better description";
+ String argsAlter2 = String.format("-alterCatalog %s -catalogDescription \"%s\"",
+ catName, description);
+ execute(new SchemaToolTaskAlterCatalog(), argsAlter2);
+ cat = client.getCatalog(catName);
+ Assert.assertEquals(location, cat.getLocationUri());
+ Assert.assertEquals(description, cat.getDescription());
+
+ location = "file:///tmp/a_third_location";
+ description = "best description yet";
+ String argsAlter3 = String.format("-alterCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
+ catName, location, description);
+ execute(new SchemaToolTaskAlterCatalog(), argsAlter3);
+ cat = client.getCatalog(catName);
+ Assert.assertEquals(location, cat.getLocationUri());
+ Assert.assertEquals(description, cat.getDescription());
+ }
+
+ @Test(expected = HiveMetaException.class)
+ public void alterBogusCatalog() throws HiveMetaException {
+ String catName = "nosuch";
+ String location = "file:///tmp/somewhere";
+ String description = "whatever";
+ String argsAlter = String.format("-alterCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
+ catName, location, description);
+ execute(new SchemaToolTaskAlterCatalog(), argsAlter);
+ }
+
+ @Test(expected = HiveMetaException.class)
+ public void alterCatalogNoChange() throws HiveMetaException {
+ String catName = "alter_cat_no_change";
+ String location = "file:///tmp/alter_cat_no_change";
+ String description = "description";
+ String argsCreate = String.format("-createCatalog %s -catalogLocation \"%s\" -catalogDescription \"%s\"",
+ catName, location, description);
+ execute(new SchemaToolTaskCreateCatalog(), argsCreate);
+
+ String argsAlter = String.format("-alterCatalog %s", catName);
+ execute(new SchemaToolTaskAlterCatalog(), argsAlter);
+ }
+
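+ // Moving a database across catalogs should carry its functions, tables, and partitions with it.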
+ @Test
+ public void moveDatabase() throws HiveMetaException, TException {
+ String toCatName = "moveDbCat";
+ String dbName = "moveDbDb";
+ String tableName = "moveDbTable";
+ String funcName = "movedbfunc";
+ String partVal = "moveDbKey";
+
+ new CatalogBuilder()
+ .setName(toCatName)
+ .setLocation("file:///tmp")
+ .create(client);
+
+ Database db = new DatabaseBuilder()
+ .setCatalogName(DEFAULT_CATALOG_NAME)
+ .setName(dbName)
+ .create(client, conf);
+
+ new FunctionBuilder()
+ .inDb(db)
+ .setName(funcName)
+ .setClass("org.apache.hive.myudf")
+ .create(client, conf);
+
+ Table table = new TableBuilder()
+ .inDb(db)
+ .setTableName(tableName)
+ .addCol("a", "int")
+ .addPartCol("p", "string")
+ .create(client, conf);
+
+ new PartitionBuilder()
+ .inTable(table)
+ .addValue(partVal)
+ .addToTable(client, conf);
+
+ String argsMoveDB = String.format("-moveDatabase %s -fromCatalog %s -toCatalog %s", dbName,
+ DEFAULT_CATALOG_NAME, toCatName);
+ execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
+
+ Database fetchedDb = client.getDatabase(toCatName, dbName);
+ Assert.assertNotNull(fetchedDb);
+ Assert.assertEquals(toCatName.toLowerCase(), fetchedDb.getCatalogName());
+
+ Function fetchedFunction = client.getFunction(toCatName, dbName, funcName);
+ Assert.assertNotNull(fetchedFunction);
+ Assert.assertEquals(toCatName.toLowerCase(), fetchedFunction.getCatName());
+ Assert.assertEquals(dbName.toLowerCase(), fetchedFunction.getDbName());
+
+ Table fetchedTable = client.getTable(toCatName, dbName, tableName);
+ Assert.assertNotNull(fetchedTable);
+ Assert.assertEquals(toCatName.toLowerCase(), fetchedTable.getCatName());
+ Assert.assertEquals(dbName.toLowerCase(), fetchedTable.getDbName());
+
+ Partition fetchedPart =
+ client.getPartition(toCatName, dbName, tableName, Collections.singletonList(partVal));
+ Assert.assertNotNull(fetchedPart);
+ Assert.assertEquals(toCatName.toLowerCase(), fetchedPart.getCatName());
+ Assert.assertEquals(dbName.toLowerCase(), fetchedPart.getDbName());
+ Assert.assertEquals(tableName.toLowerCase(), fetchedPart.getTableName());
+ }
+
+ @Test
+ public void moveDatabaseWithExistingDbOfSameNameAlreadyInTargetCatalog()
+ throws TException, HiveMetaException {
+ String catName = "clobberCatalog";
+ new CatalogBuilder()
+ .setName(catName)
+ .setLocation("file:///tmp")
+ .create(client);
+ try {
+ String argsMoveDB = String.format("-moveDatabase %s -fromCatalog %s -toCatalog %s",
+ DEFAULT_DATABASE_NAME, catName, DEFAULT_CATALOG_NAME);
+ execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
+ Assert.fail("Attempt to move default database should have failed.");
+ } catch (HiveMetaException e) {
+ // good
+ }
+
+ // Make sure nothing really moved
+ Set<String> dbNames = new HashSet<>(client.getAllDatabases(DEFAULT_CATALOG_NAME));
+ Assert.assertTrue(dbNames.contains(DEFAULT_DATABASE_NAME));
+ }
+
+ @Test(expected = HiveMetaException.class)
+ public void moveNonExistentDatabase() throws TException, HiveMetaException {
+ String catName = "moveNonExistentDb";
+ new CatalogBuilder()
+ .setName(catName)
+ .setLocation("file:///tmp")
+ .create(client);
+ String argsMoveDB = String.format("-moveDatabase nosuch -fromCatalog %s -toCatalog %s",
+ catName, DEFAULT_CATALOG_NAME);
+ execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
+ }
+
+ @Test
+ public void moveDbToNonExistentCatalog() throws TException, HiveMetaException {
+ String dbName = "doomedToHomelessness";
+ new DatabaseBuilder()
+ .setName(dbName)
+ .create(client, conf);
+ try {
+ String argsMoveDB = String.format("-moveDatabase %s -fromCatalog %s -toCatalog nosuch",
+ dbName, DEFAULT_CATALOG_NAME);
+ execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
+ Assert.fail("Attempt to move database to non-existent catalog should have failed.");
+ } catch (HiveMetaException e) {
+ // good
+ }
+
+ // Make sure nothing really moved
+ Set<String> dbNames = new HashSet<>(client.getAllDatabases(DEFAULT_CATALOG_NAME));
+ Assert.assertTrue(dbNames.contains(dbName.toLowerCase()));
+ }
+
+ @Test
+ public void moveTable() throws TException, HiveMetaException {
+ String toCatName = "moveTableCat";
+ String toDbName = "moveTableDb";
+ String tableName = "moveTableTable";
+ String partVal = "moveTableKey";
+
+ new CatalogBuilder()
+ .setName(toCatName)
+ .setLocation("file:///tmp")
+ .create(client);
+
+ new DatabaseBuilder()
+ .setCatalogName(toCatName)
+ .setName(toDbName)
+ .create(client, conf);
+
+ Table table = new TableBuilder()
+ .setTableName(tableName)
+ .addCol("a", "int")
+ .addPartCol("p", "string")
+ .create(client, conf);
+
+ new PartitionBuilder()
+ .inTable(table)
+ .addValue(partVal)
+ .addToTable(client, conf);
+
+ String argsMoveTable = String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
+ tableName, DEFAULT_CATALOG_NAME, toCatName, DEFAULT_DATABASE_NAME, toDbName);
+ execute(new SchemaToolTaskMoveTable(), argsMoveTable);
+
+ Table fetchedTable = client.getTable(toCatName, toDbName, tableName);
+ Assert.assertNotNull(fetchedTable);
+ Assert.assertEquals(toCatName.toLowerCase(), fetchedTable.getCatName());
+ Assert.assertEquals(toDbName.toLowerCase(), fetchedTable.getDbName());
+
+ Partition fetchedPart =
+ client.getPartition(toCatName, toDbName, tableName, Collections.singletonList(partVal));
+ Assert.assertNotNull(fetchedPart);
+ Assert.assertEquals(toCatName.toLowerCase(), fetchedPart.getCatName());
+ Assert.assertEquals(toDbName.toLowerCase(), fetchedPart.getDbName());
+ Assert.assertEquals(tableName.toLowerCase(), fetchedPart.getTableName());
+ }
+
+ @Test
+ public void moveTableWithinCatalog() throws TException, HiveMetaException {
+ String toDbName = "moveTableWithinCatalogDb";
+ String tableName = "moveTableWithinCatalogTable";
+ String partVal = "moveTableWithinCatalogKey";
+
+ new DatabaseBuilder()
+ .setName(toDbName)
+ .create(client, conf);
+
+ Table table = new TableBuilder()
+ .setTableName(tableName)
+ .addCol("a", "int")
+ .addPartCol("p", "string")
+ .create(client, conf);
+
+ new PartitionBuilder()
+ .inTable(table)
+ .addValue(partVal)
+ .addToTable(client, conf);
+
+ String argsMoveTable = String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
+ tableName, DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME, toDbName);
+ execute(new SchemaToolTaskMoveTable(), argsMoveTable);
+
+ Table fetchedTable = client.getTable(DEFAULT_CATALOG_NAME, toDbName, tableName);
+ Assert.assertNotNull(fetchedTable);
+ Assert.assertEquals(DEFAULT_CATALOG_NAME, fetchedTable.getCatName());
+ Assert.assertEquals(toDbName.toLowerCase(), fetchedTable.getDbName());
+
+ Partition fetchedPart =
+ client.getPartition(DEFAULT_CATALOG_NAME, toDbName, tableName, Collections.singletonList(partVal));
+ Assert.assertNotNull(fetchedPart);
+ Assert.assertEquals(DEFAULT_CATALOG_NAME, fetchedPart.getCatName());
+ Assert.assertEquals(toDbName.toLowerCase(), fetchedPart.getDbName());
+ Assert.assertEquals(tableName.toLowerCase(), fetchedPart.getTableName());
+ }
+
+ @Test
+ public void moveTableWithExistingTableOfSameNameAlreadyInTargetDatabase()
+ throws TException, HiveMetaException {
+ String toDbName = "clobberTableDb";
+ String tableName = "clobberTableTable";
+
+ Database toDb = new DatabaseBuilder()
+ .setName(toDbName)
+ .create(client, conf);
+
+ new TableBuilder()
+ .setTableName(tableName)
+ .addCol("a", "int")
+ .create(client, conf);
+
+ new TableBuilder()
+ .inDb(toDb)
+ .setTableName(tableName)
+ .addCol("b", "varchar(32)")
+ .create(client, conf);
+
+ try {
+ String argsMoveTable =
+ String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
+ tableName, DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME, toDbName);
+ execute(new SchemaToolTaskMoveTable(), argsMoveTable);
+ Assert.fail("Attempt to move table should have failed.");
+ } catch (HiveMetaException e) {
+ // good
+ }
+
+ // Make sure nothing really moved
+ Set<String> tableNames = new HashSet<>(client.getAllTables(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME));
+ Assert.assertTrue(tableNames.contains(tableName.toLowerCase()));
+
+ // Make sure the table in the target database didn't get clobbered
+ Table fetchedTable = client.getTable(DEFAULT_CATALOG_NAME, toDbName, tableName);
+ Assert.assertEquals("b", fetchedTable.getSd().getCols().get(0).getName());
+ }
+
+ @Test(expected = HiveMetaException.class)
+ public void moveNonExistentTable() throws TException, HiveMetaException {
+ String toDbName = "moveNonExistentTable";
+ new DatabaseBuilder()
+ .setName(toDbName)
+ .create(client, conf);
+ String argsMoveTable =
+ String.format("-moveTable nosuch -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
+ DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME, toDbName);
+ execute(new SchemaToolTaskMoveTable(), argsMoveTable);
+ }
+
+ @Test
+ public void moveTableToNonExistentDb() throws TException, HiveMetaException {
+ String tableName = "doomedToWander";
+ new TableBuilder()
+ .setTableName(tableName)
+ .addCol("a", "int")
+ .create(client, conf);
+
+ try {
+ String argsMoveTable =
+ String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase nosuch",
+ tableName, DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
+ execute(new SchemaToolTaskMoveTable(), argsMoveTable);
+ Assert.fail("Attempt to move table to non-existent table should have failed.");
+ } catch (HiveMetaException e) {
+ // good
+ }
+
+ // Make sure nothing really moved
+ Set<String> tableNames = new HashSet<>(client.getAllTables(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME));
+ Assert.assertTrue(tableNames.contains(tableName.toLowerCase()));
+ }
+
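+ // Prepends the shared connection args, parses the full command line, and runs the given task.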
+ private static void execute(SchemaToolTask task, String taskArgs) throws HiveMetaException {
+ try {
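+ // Space-delimited tokenizer that keeps double-quoted values (e.g. descriptions with spaces) intact.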
+ StrTokenizer tokenizer = new StrTokenizer(argsBase + taskArgs, ' ', '\"');
+ SchemaToolCommandLine cl = new SchemaToolCommandLine(tokenizer.getTokenArray(), null);
+ task.setCommandLineArguments(cl);
+ } catch (Exception e) {
+ throw new IllegalStateException("Could not parse comman line \n" + argsBase + taskArgs, e);
+ }
+
+ task.setHiveSchemaTool(schemaTool);
+ task.execute();
+ }
+}
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/schematool/TestSchemaTool.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/schematool/TestSchemaTool.java
index 0132a0b..e0b93f3 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/schematool/TestSchemaTool.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/schematool/TestSchemaTool.java
@@ -25,9 +25,9 @@
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.PostgresCommandParser;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper.NestedScriptParser;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper.PostgresCommandParser;
import org.junit.Assert;
import org.junit.Test;
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreSchemaInfo.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreSchemaInfo.java
index ed4a2ef..f4c8f65 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreSchemaInfo.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreSchemaInfo.java
@@ -22,7 +22,7 @@
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper;
/**
* Defines the method which must be implemented to be used using schema tool to support metastore
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
index 88bd42d..49e19ad 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
@@ -30,10 +30,10 @@
import java.util.List;
import java.util.Map;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
-
import com.google.common.collect.ImmutableMap;
+
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper;
+import org.apache.hadoop.hive.metastore.tools.schematool.HiveSchemaHelper.MetaStoreConnectionInfo;
import org.apache.hadoop.hive.metastore.utils.MetastoreVersionInfo;
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
deleted file mode 100644
index 2da07a5..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
+++ /dev/null
@@ -1,673 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.util.IllegalFormatException;
-import java.util.List;
-
-public class HiveSchemaHelper {
- private static final Logger LOG = LoggerFactory.getLogger(HiveSchemaHelper.class);
-
- public static final String DB_DERBY = "derby";
- public static final String DB_HIVE = "hive";
- public static final String DB_MSSQL = "mssql";
- public static final String DB_MYSQL = "mysql";
- public static final String DB_POSTGRACE = "postgres";
- public static final String DB_ORACLE = "oracle";
- public static final String EMBEDDED_HS2_URL =
- "jdbc:hive2://?hive.conf.restricted.list=;hive.security.authorization.sqlstd.confwhitelist=.*;"
- + "hive.security.authorization.sqlstd.confwhitelist.append=.*;hive.security.authorization.enabled=false;"
- + "hive.metastore.uris=;hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdConfOnlyAuthorizerFactory;"
- + "hive.support.concurrency=false;hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager;"
- + "hive.metastore.rawstore.impl=org.apache.hadoop.hive.metastore.ObjectStore";
- public static final String HIVE_JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver";
-
- /***
- * Get JDBC connection to metastore db
- * @param userName metastore connection username
- * @param password metastore connection password
- * @param url Metastore URL. If null will be read from config file.
- * @param driver Driver class. If null will be read from config file.
- * @param printInfo print connection parameters
- * @param conf hive config object
- * @param schema the schema to create the connection for
- * @return metastore connection object
- * @throws org.apache.hadoop.hive.metastore.HiveMetaException
- */
- public static Connection getConnectionToMetastore(String userName, String password, String url,
- String driver, boolean printInfo, Configuration conf, String schema) throws HiveMetaException {
- try {
- url = url == null ? getValidConfVar(MetastoreConf.ConfVars.CONNECT_URL_KEY, conf) : url;
- driver = driver == null ? getValidConfVar(MetastoreConf.ConfVars.CONNECTION_DRIVER, conf) : driver;
- if (printInfo) {
- logAndPrintToStdout("Metastore connection URL:\t " + url);
- logAndPrintToStdout("Metastore Connection Driver :\t " + driver);
- logAndPrintToStdout("Metastore connection User:\t " + userName);
- if (MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST)) {
- logAndPrintToStdout("Metastore connection Password:\t " + password);
- }
- }
- if ((userName == null) || userName.isEmpty()) {
- throw new HiveMetaException("UserName empty ");
- }
-
- // load required JDBC driver
- Class.forName(driver);
-
- // Connect using the JDBC URL and user/pass from conf
- Connection conn = DriverManager.getConnection(url, userName, password);
- if (schema != null) {
- conn.setSchema(schema);
- }
- return conn;
- } catch (IOException | SQLException e) {
- throw new HiveMetaException("Failed to get schema version.", e);
- } catch (ClassNotFoundException e) {
- LOG.error("Unable to find driver class", e);
- throw new HiveMetaException("Failed to load driver", e);
- }
- }
-
- public static Connection getConnectionToMetastore(MetaStoreConnectionInfo info, String schema)
- throws HiveMetaException {
- return getConnectionToMetastore(info.getUsername(), info.getPassword(), info.getUrl(), info.getDriver(),
- info.getPrintInfo(), info.getConf(), schema);
- }
-
- public static String getValidConfVar(MetastoreConf.ConfVars confVar, Configuration conf)
- throws IOException {
- String confVarStr = MetastoreConf.getAsString(conf, confVar);
- if (confVarStr == null || confVarStr.isEmpty()) {
- throw new IOException("Empty " + confVar.getVarname());
- }
- return confVarStr.trim();
- }
-
- private static void logAndPrintToStdout(String msg) {
- LOG.info(msg);
- System.out.println(msg);
- }
-
- public interface NestedScriptParser {
-
- enum CommandType {
- PARTIAL_STATEMENT,
- TERMINATED_STATEMENT,
- COMMENT
- }
-
- String DEFAULT_DELIMITER = ";";
- String DEFAULT_QUOTE = "\"";
-
- /**
- * Find the type of given command
- *
- * @param dbCommand
- * @return
- */
- boolean isPartialCommand(String dbCommand) throws IllegalArgumentException;
-
- /**
- * Parse the DB specific nesting format and extract the inner script name if any
- *
- * @param dbCommand command from parent script
- * @return
- * @throws IllegalFormatException
- */
- String getScriptName(String dbCommand) throws IllegalArgumentException;
-
- /**
- * Find if the given command is a nested script execution
- *
- * @param dbCommand
- * @return
- */
- boolean isNestedScript(String dbCommand);
-
- /**
- * Find if the given command should not be passed to DB
- *
- * @param dbCommand
- * @return
- */
- boolean isNonExecCommand(String dbCommand);
-
- /**
- * Get the SQL statement delimiter
- *
- * @return
- */
- String getDelimiter();
-
- /**
- * Get the SQL indentifier quotation character
- *
- * @return
- */
- String getQuoteCharacter();
-
- /**
- * Clear any client specific tags
- *
- * @return
- */
- String cleanseCommand(String dbCommand);
-
- /**
- * Does the DB required table/column names quoted
- *
- * @return
- */
- boolean needsQuotedIdentifier();
-
- /**
- * Flatten the nested upgrade script into a buffer
- *
- * @param scriptDir upgrade script directory
- * @param scriptFile upgrade script file
- * @return string of sql commands
- */
- String buildCommand(String scriptDir, String scriptFile)
- throws IllegalFormatException, IOException;
-
- /**
- * Flatten the nested upgrade script into a buffer
- *
- * @param scriptDir upgrade script directory
- * @param scriptFile upgrade script file
- * @param fixQuotes whether to replace quote characters
- * @return string of sql commands
- */
- String buildCommand(String scriptDir, String scriptFile, boolean fixQuotes)
- throws IllegalFormatException, IOException;
- }
-
- /***
- * Base implementation of NestedScriptParser
- * abstractCommandParser.
- *
- */
- private static abstract class AbstractCommandParser implements NestedScriptParser {
- private List<String> dbOpts;
- private String msUsername;
- private String msPassword;
- private Configuration conf;
- // Depending on whether we are using beeline or sqlline the line endings have to be handled
- // differently.
- private final boolean usingSqlLine;
-
- public AbstractCommandParser(String dbOpts, String msUsername, String msPassword,
- Configuration conf, boolean usingSqlLine) {
- setDbOpts(dbOpts);
- this.msUsername = msUsername;
- this.msPassword = msPassword;
- this.conf = conf;
- this.usingSqlLine = usingSqlLine;
- }
-
- @Override
- public boolean isPartialCommand(String dbCommand) throws IllegalArgumentException{
- if (dbCommand == null || dbCommand.isEmpty()) {
- throw new IllegalArgumentException("invalid command line " + dbCommand);
- }
- dbCommand = dbCommand.trim();
- if (dbCommand.endsWith(getDelimiter()) || isNonExecCommand(dbCommand)) {
- return false;
- } else {
- return true;
- }
- }
-
- @Override
- public boolean isNonExecCommand(String dbCommand) {
- return (dbCommand.startsWith("--") || dbCommand.startsWith("#"));
- }
-
- @Override
- public String getDelimiter() {
- return DEFAULT_DELIMITER;
- }
-
- @Override
- public String getQuoteCharacter() {
- return DEFAULT_QUOTE;
- }
-
-
- @Override
- public String cleanseCommand(String dbCommand) {
- // strip off the delimiter
- if (dbCommand.endsWith(getDelimiter())) {
- dbCommand = dbCommand.substring(0,
- dbCommand.length() - getDelimiter().length());
- }
- return dbCommand;
- }
-
- @Override
- public boolean needsQuotedIdentifier() {
- return false;
- }
-
- @Override
- public String buildCommand(
- String scriptDir, String scriptFile) throws IllegalFormatException, IOException {
- return buildCommand(scriptDir, scriptFile, false);
- }
-
- @Override
- public String buildCommand(
- String scriptDir, String scriptFile, boolean fixQuotes) throws IllegalFormatException, IOException {
- BufferedReader bfReader =
- new BufferedReader(new FileReader(scriptDir + File.separatorChar + scriptFile));
- String currLine;
- StringBuilder sb = new StringBuilder();
- String currentCommand = null;
- while ((currLine = bfReader.readLine()) != null) {
- currLine = currLine.trim();
-
- if (fixQuotes && !getQuoteCharacter().equals(DEFAULT_QUOTE)) {
- currLine = currLine.replace("\\\"", getQuoteCharacter());
- }
-
- if (currLine.isEmpty()) {
- continue; // skip empty lines
- }
-
- if (currentCommand == null) {
- currentCommand = currLine;
- } else {
- currentCommand = currentCommand + " " + currLine;
- }
- if (isPartialCommand(currLine)) {
- // if its a partial line, continue collecting the pieces
- continue;
- }
-
- // if this is a valid executable command then add it to the buffer
- if (!isNonExecCommand(currentCommand)) {
- currentCommand = cleanseCommand(currentCommand);
- if (isNestedScript(currentCommand)) {
- // if this is a nested sql script then flatten it
- String currScript = getScriptName(currentCommand);
- sb.append(buildCommand(scriptDir, currScript));
- } else {
- // Now we have a complete statement, process it
- // write the line to buffer
- sb.append(currentCommand);
- if (usingSqlLine) sb.append(";");
- sb.append(System.getProperty("line.separator"));
- }
- }
- currentCommand = null;
- }
- bfReader.close();
- return sb.toString();
- }
-
- private void setDbOpts(String dbOpts) {
- if (dbOpts != null) {
- this.dbOpts = Lists.newArrayList(dbOpts.split(","));
- } else {
- this.dbOpts = Lists.newArrayList();
- }
- }
-
- protected List<String> getDbOpts() {
- return dbOpts;
- }
-
- protected String getMsUsername() {
- return msUsername;
- }
-
- protected String getMsPassword() {
- return msPassword;
- }
-
- protected Configuration getConf() {
- return conf;
- }
- }
-
- // Derby commandline parser
- public static class DerbyCommandParser extends AbstractCommandParser {
- private static final String DERBY_NESTING_TOKEN = "RUN";
-
- public DerbyCommandParser(String dbOpts, String msUsername, String msPassword,
- Configuration conf, boolean usingSqlLine) {
- super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- }
-
- @Override
- public String getScriptName(String dbCommand) throws IllegalArgumentException {
-
- if (!isNestedScript(dbCommand)) {
- throw new IllegalArgumentException("Not a script format " + dbCommand);
- }
- String[] tokens = dbCommand.split(" ");
- if (tokens.length != 2) {
- throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
- }
- return tokens[1].replace(";", "").replaceAll("'", "");
- }
-
- @Override
- public boolean isNestedScript(String dbCommand) {
- // Derby script format is RUN '<file>'
- return dbCommand.startsWith(DERBY_NESTING_TOKEN);
- }
- }
-
- // Derby commandline parser
- public static class HiveCommandParser extends AbstractCommandParser {
- private static String HIVE_NESTING_TOKEN = "SOURCE";
- private final NestedScriptParser nestedDbCommandParser;
-
- public HiveCommandParser(String dbOpts, String msUsername, String msPassword,
- Configuration conf, String metaDbType, boolean usingSqlLine) {
- super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- nestedDbCommandParser = getDbCommandParser(metaDbType, usingSqlLine);
- }
-
- @Override
- public String getQuoteCharacter() {
- return nestedDbCommandParser.getQuoteCharacter();
- }
-
- @Override
- public String getScriptName(String dbCommand) throws IllegalArgumentException {
-
- if (!isNestedScript(dbCommand)) {
- throw new IllegalArgumentException("Not a script format " + dbCommand);
- }
- String[] tokens = dbCommand.split(" ");
- if (tokens.length != 2) {
- throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
- }
- return tokens[1].replace(";", "");
- }
-
- @Override
- public boolean isNestedScript(String dbCommand) {
- return dbCommand.startsWith(HIVE_NESTING_TOKEN);
- }
- }
-
- // MySQL parser
- public static class MySqlCommandParser extends AbstractCommandParser {
- private static final String MYSQL_NESTING_TOKEN = "SOURCE";
- private static final String DELIMITER_TOKEN = "DELIMITER";
- private String delimiter = DEFAULT_DELIMITER;
-
- public MySqlCommandParser(String dbOpts, String msUsername, String msPassword,
- Configuration conf, boolean usingSqlLine) {
- super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- }
-
- @Override
- public boolean isPartialCommand(String dbCommand) throws IllegalArgumentException {
- boolean isPartial = super.isPartialCommand(dbCommand);
- // if this is a delimiter directive, reset our delimiter
- if (dbCommand.startsWith(DELIMITER_TOKEN)) {
- String[] tokens = dbCommand.split(" ");
- if (tokens.length != 2) {
- throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
- }
- delimiter = tokens[1];
- }
- return isPartial;
- }
-
- @Override
- public String getScriptName(String dbCommand) throws IllegalArgumentException {
- String[] tokens = dbCommand.split(" ");
- if (tokens.length != 2) {
- throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
- }
- // remove ending ';'
- return tokens[1].replace(";", "");
- }
-
- @Override
- public boolean isNestedScript(String dbCommand) {
- return dbCommand.startsWith(MYSQL_NESTING_TOKEN);
- }
-
- @Override
- public String getDelimiter() {
- return delimiter;
- }
-
- @Override
- public String getQuoteCharacter() {
- return "`";
- }
-
- @Override
- public boolean isNonExecCommand(String dbCommand) {
- return super.isNonExecCommand(dbCommand) ||
- (dbCommand.startsWith("/*") && dbCommand.endsWith("*/")) ||
- dbCommand.startsWith(DELIMITER_TOKEN);
- }
-
- @Override
- public String cleanseCommand(String dbCommand) {
- return super.cleanseCommand(dbCommand).replaceAll("/\\*.*?\\*/[^;]", "");
- }
-
- }
-
- // Postgres specific parser
- public static class PostgresCommandParser extends AbstractCommandParser {
- private static final String POSTGRES_NESTING_TOKEN = "\\i";
- @VisibleForTesting
- public static final String POSTGRES_STANDARD_STRINGS_OPT = "SET standard_conforming_strings";
- @VisibleForTesting
- public static final String POSTGRES_SKIP_STANDARD_STRINGS_DBOPT = "postgres.filter.81";
-
- public PostgresCommandParser(String dbOpts, String msUsername, String msPassword,
- Configuration conf, boolean usingSqlLine) {
- super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- }
-
- @Override
- public String getScriptName(String dbCommand) throws IllegalArgumentException {
- String[] tokens = dbCommand.split(" ");
- if (tokens.length != 2) {
- throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
- }
- // remove ending ';'
- return tokens[1].replace(";", "");
- }
-
- @Override
- public boolean isNestedScript(String dbCommand) {
- return dbCommand.startsWith(POSTGRES_NESTING_TOKEN);
- }
-
- @Override
- public boolean needsQuotedIdentifier() {
- return true;
- }
-
- @Override
- public boolean isNonExecCommand(String dbCommand) {
- // Skip "standard_conforming_strings" command which is read-only in older
- // Postgres versions like 8.1
- // See: http://www.postgresql.org/docs/8.2/static/release-8-1.html
- if (getDbOpts().contains(POSTGRES_SKIP_STANDARD_STRINGS_DBOPT)) {
- if (dbCommand.startsWith(POSTGRES_STANDARD_STRINGS_OPT)) {
- return true;
- }
- }
- return super.isNonExecCommand(dbCommand);
- }
- }
-
- //Oracle specific parser
- public static class OracleCommandParser extends AbstractCommandParser {
- private static final String ORACLE_NESTING_TOKEN = "@";
-
- public OracleCommandParser(String dbOpts, String msUsername, String msPassword,
- Configuration conf, boolean usingSqlLine) {
- super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- }
-
- @Override
- public String getScriptName(String dbCommand) throws IllegalArgumentException {
- if (!isNestedScript(dbCommand)) {
- throw new IllegalArgumentException("Not a nested script format " + dbCommand);
- }
- // remove ending ';' and starting '@'
- return dbCommand.replace(";", "").replace(ORACLE_NESTING_TOKEN, "");
- }
-
- @Override
- public boolean isNestedScript(String dbCommand) {
- return dbCommand.startsWith(ORACLE_NESTING_TOKEN);
- }
- }
-
- //MSSQL specific parser
- public static class MSSQLCommandParser extends AbstractCommandParser {
- private static final String MSSQL_NESTING_TOKEN = ":r";
-
- public MSSQLCommandParser(String dbOpts, String msUsername, String msPassword,
- Configuration conf, boolean usingSqlLine) {
- super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- }
-
- @Override
- public String getScriptName(String dbCommand) throws IllegalArgumentException {
- String[] tokens = dbCommand.split(" ");
- if (tokens.length != 2) {
- throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
- }
- return tokens[1];
- }
-
- @Override
- public boolean isNestedScript(String dbCommand) {
- return dbCommand.startsWith(MSSQL_NESTING_TOKEN);
- }
- }
-
- public static NestedScriptParser getDbCommandParser(String dbName, boolean usingSqlLine) {
- return getDbCommandParser(dbName, null, usingSqlLine);
- }
-
- public static NestedScriptParser getDbCommandParser(String dbName, String metaDbType, boolean usingSqlLine) {
- return getDbCommandParser(dbName, null, null, null, null, metaDbType, usingSqlLine);
- }
-
- public static NestedScriptParser getDbCommandParser(String dbName,
- String dbOpts, String msUsername, String msPassword,
- Configuration conf, String metaDbType, boolean usingSqlLine) {
- if (dbName.equalsIgnoreCase(DB_DERBY)) {
- return new DerbyCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- } else if (dbName.equalsIgnoreCase(DB_HIVE)) {
- return new HiveCommandParser(dbOpts, msUsername, msPassword, conf, metaDbType, usingSqlLine);
- } else if (dbName.equalsIgnoreCase(DB_MSSQL)) {
- return new MSSQLCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- } else if (dbName.equalsIgnoreCase(DB_MYSQL)) {
- return new MySqlCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- } else if (dbName.equalsIgnoreCase(DB_POSTGRACE)) {
- return new PostgresCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- } else if (dbName.equalsIgnoreCase(DB_ORACLE)) {
- return new OracleCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
- } else {
- throw new IllegalArgumentException("Unknown dbType " + dbName);
- }
- }
-
- public static class MetaStoreConnectionInfo {
- private final String userName;
- private final String password;
- private final String url;
- private final String driver;
- private final boolean printInfo;
- private final Configuration conf;
- private final String dbType;
- private final String metaDbType;
-
- public MetaStoreConnectionInfo(String userName, String password, String url, String driver,
- boolean printInfo, Configuration conf, String dbType, String metaDbType) {
- super();
- this.userName = userName;
- this.password = password;
- this.url = url;
- this.driver = driver;
- this.printInfo = printInfo;
- this.conf = conf;
- this.dbType = dbType;
- this.metaDbType = metaDbType;
- }
-
- public String getPassword() {
- return password;
- }
-
- public String getUrl() {
- return url;
- }
-
- public String getDriver() {
- return driver;
- }
-
- public boolean isPrintInfo() {
- return printInfo;
- }
-
- public Configuration getConf() {
- return conf;
- }
-
- public String getUsername() {
- return userName;
- }
-
- public boolean getPrintInfo() {
- return printInfo;
- }
-
- public String getDbType() {
- return dbType;
- }
-
- public String getMetaDbType() {
- return metaDbType;
- }
- }
-}
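
The parser hierarchy deleted above is dialect dispatch over one convention: each backend marks a nested script with its own token (RUN, SOURCE, \i, @, :r), and getScriptName() strips that token back off. A minimal sketch of driving it through the old two-argument factory method, assuming the pre-move org.apache.hadoop.hive.metastore.tools package is still on the classpath and that DB_DERBY holds the string "derby":

    import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
    import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;

    public class NestedScriptDemo {
      public static void main(String[] args) {
        // Derby marks nested scripts as: RUN 'script.derby.sql';
        NestedScriptParser derby = HiveSchemaHelper.getDbCommandParser("derby", true);
        String line = "RUN 'hive-schema-3.0.0.derby.sql';";  // representative script name
        if (derby.isNestedScript(line)) {
          // getScriptName() drops the token, the quotes and the trailing ';'
          System.out.println(derby.getScriptName(line));  // hive-schema-3.0.0.derby.sql
        }
      }
    }
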
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
deleted file mode 100644
index 2fac79f..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
+++ /dev/null
@@ -1,469 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.io.output.NullOutputStream;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.IMetaStoreSchemaInfo;
-import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfoFactory;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import sqlline.SqlLine;
-
-import java.io.BufferedReader;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.PrintStream;
-import java.net.URI;
-import java.sql.Connection;
-import java.sql.SQLException;
-
-public class MetastoreSchemaTool {
- private static final Logger LOG = LoggerFactory.getLogger(MetastoreSchemaTool.class);
- private static final String PASSWD_MASK = "[passwd stripped]";
-
- protected Configuration conf;
-
- protected String dbOpts = null;
- protected String dbType;
- protected String driver = null;
- protected boolean dryRun = false;
- protected String hiveDb; // Hive database, for use when creating the user, not for connecting
- protected String hivePasswd; // Hive password, for use when creating the user, not for connecting
- protected String hiveUser; // Hive username, for use when creating the user, not for connecting
- protected String metaDbType;
- protected IMetaStoreSchemaInfo metaStoreSchemaInfo;
- protected boolean needsQuotedIdentifier;
- protected String quoteCharacter;
- protected String passWord = null;
- protected String url = null;
- protected String userName = null;
- protected URI[] validationServers = null; // The list of servers the database/partition/table can locate on
- protected boolean verbose = false;
- protected SchemaToolCommandLine cmdLine;
-
- private static String homeDir;
-
- private static String findHomeDir() {
- // If METASTORE_HOME is set, use it, else use HIVE_HOME for backwards compatibility.
- homeDir = homeDir == null ? System.getenv("METASTORE_HOME") : homeDir;
- return homeDir == null ? System.getenv("HIVE_HOME") : homeDir;
- }
-
- @VisibleForTesting
- public static void setHomeDirForTesting() {
- homeDir = System.getProperty("test.tmp.dir", "target/tmp");
- }
-
- @VisibleForTesting
- public MetastoreSchemaTool() {
-
- }
-
- @VisibleForTesting
- public void init(String metastoreHome, String[] args, OptionGroup additionalOptions,
- Configuration conf) throws HiveMetaException {
- try {
- cmdLine = new SchemaToolCommandLine(args, additionalOptions);
- } catch (ParseException e) {
- System.err.println("Failed to parse command line. ");
- throw new HiveMetaException(e);
- }
-
- if (metastoreHome == null || metastoreHome.isEmpty()) {
- throw new HiveMetaException("No Metastore home directory provided");
- }
- this.conf = conf;
- this.dbType = cmdLine.getDbType();
- this.metaDbType = cmdLine.getMetaDbType();
- NestedScriptParser parser = getDbCommandParser(dbType, metaDbType);
- this.needsQuotedIdentifier = parser.needsQuotedIdentifier();
- this.quoteCharacter = parser.getQuoteCharacter();
- this.metaStoreSchemaInfo = MetaStoreSchemaInfoFactory.get(conf, metastoreHome, dbType);
- // If the dbType is "hive", this is setting up the information schema in Hive.
- // We will set the default jdbc url and driver.
- // It is overridden by command line options if passed (-url and -driver).
- if (dbType.equalsIgnoreCase(HiveSchemaHelper.DB_HIVE)) {
- this.url = HiveSchemaHelper.EMBEDDED_HS2_URL;
- this.driver = HiveSchemaHelper.HIVE_JDBC_DRIVER;
- }
-
- if (cmdLine.hasOption("userName")) {
- setUserName(cmdLine.getOptionValue("userName"));
- } else {
- setUserName(getConf().get(MetastoreConf.ConfVars.CONNECTION_USER_NAME.getVarname()));
- }
- if (cmdLine.hasOption("passWord")) {
- setPassWord(cmdLine.getOptionValue("passWord"));
- } else {
- try {
- setPassWord(MetastoreConf.getPassword(getConf(), ConfVars.PWD));
- } catch (IOException err) {
- throw new HiveMetaException("Error getting metastore password", err);
- }
- }
- if (cmdLine.hasOption("url")) {
- setUrl(cmdLine.getOptionValue("url"));
- }
- if (cmdLine.hasOption("driver")) {
- setDriver(cmdLine.getOptionValue("driver"));
- }
- if (cmdLine.hasOption("dryRun")) {
- setDryRun(true);
- }
- if (cmdLine.hasOption("verbose")) {
- setVerbose(true);
- }
- if (cmdLine.hasOption("dbOpts")) {
- setDbOpts(cmdLine.getOptionValue("dbOpts"));
- }
- if (cmdLine.hasOption("validate") && cmdLine.hasOption("servers")) {
- setValidationServers(cmdLine.getOptionValue("servers"));
- }
- if (cmdLine.hasOption("hiveUser")) {
- setHiveUser(cmdLine.getOptionValue("hiveUser"));
- }
- if (cmdLine.hasOption("hivePassword")) {
- setHivePasswd(cmdLine.getOptionValue("hivePassword"));
- }
- if (cmdLine.hasOption("hiveDb")) {
- setHiveDb(cmdLine.getOptionValue("hiveDb"));
- }
- }
-
- public Configuration getConf() {
- return conf;
- }
-
- protected String getDbType() {
- return dbType;
- }
-
- protected void setUrl(String url) {
- this.url = url;
- }
-
- protected void setDriver(String driver) {
- this.driver = driver;
- }
-
- public void setUserName(String userName) {
- this.userName = userName;
- }
-
- public void setPassWord(String passWord) {
- this.passWord = passWord;
- }
-
- protected boolean isDryRun() {
- return dryRun;
- }
-
- protected void setDryRun(boolean dryRun) {
- this.dryRun = dryRun;
- }
-
- protected boolean isVerbose() {
- return verbose;
- }
-
- protected void setVerbose(boolean verbose) {
- this.verbose = verbose;
- }
-
- protected void setDbOpts(String dbOpts) {
- this.dbOpts = dbOpts;
- }
-
- protected URI[] getValidationServers() {
- return validationServers;
- }
-
- protected void setValidationServers(String servers) {
- if(StringUtils.isNotEmpty(servers)) {
- String[] strServers = servers.split(",");
- this.validationServers = new URI[strServers.length];
- for (int i = 0; i < validationServers.length; i++) {
- validationServers[i] = new Path(strServers[i]).toUri();
- }
- }
- }
-
- protected String getHiveUser() {
- return hiveUser;
- }
-
- protected void setHiveUser(String hiveUser) {
- this.hiveUser = hiveUser;
- }
-
- protected String getHivePasswd() {
- return hivePasswd;
- }
-
- protected void setHivePasswd(String hivePasswd) {
- this.hivePasswd = hivePasswd;
- }
-
- protected String getHiveDb() {
- return hiveDb;
- }
-
- protected void setHiveDb(String hiveDb) {
- this.hiveDb = hiveDb;
- }
-
- protected SchemaToolCommandLine getCmdLine() {
- return cmdLine;
- }
-
- public Connection getConnectionToMetastore(boolean printInfo) throws HiveMetaException {
- return HiveSchemaHelper.getConnectionToMetastore(userName,
- passWord, url, driver, printInfo, conf, null);
- }
-
- protected NestedScriptParser getDbCommandParser(String dbType, String metaDbType) {
- return HiveSchemaHelper.getDbCommandParser(dbType, dbOpts, userName,
- passWord, conf, null, true);
- }
-
- protected MetaStoreConnectionInfo getConnectionInfo(boolean printInfo) {
- return new MetaStoreConnectionInfo(userName, passWord, url, driver, printInfo, conf,
- dbType, hiveDb);
- }
-
- protected IMetaStoreSchemaInfo getMetaStoreSchemaInfo() {
- return metaStoreSchemaInfo;
- }
-
- /**
- * check if the current schema version in metastore matches the Hive version
- */
- @VisibleForTesting
- void verifySchemaVersion() throws HiveMetaException {
- // don't check version if it's a dry run
- if (dryRun) {
- return;
- }
- String newSchemaVersion = metaStoreSchemaInfo.getMetaStoreSchemaVersion(getConnectionInfo(false));
- // verify that the new version is added to schema
- assertCompatibleVersion(metaStoreSchemaInfo.getHiveSchemaVersion(), newSchemaVersion);
- }
-
- protected void assertCompatibleVersion(String hiveSchemaVersion, String dbSchemaVersion)
- throws HiveMetaException {
- if (!metaStoreSchemaInfo.isVersionCompatible(hiveSchemaVersion, dbSchemaVersion)) {
- throw new HiveMetaException("Metastore schema version is not compatible. Hive Version: "
- + hiveSchemaVersion + ", Database Schema Version: " + dbSchemaVersion);
- }
- }
-
- /**
- * Execute a given metastore script. This default version uses sqlline to execute the files,
- * which requires only running one file. Subclasses can use other executors.
- * @param scriptDir directory script is in
- * @param scriptFile file in the directory to run
- * @throws IOException if it cannot read the file or directory
- * @throws HiveMetaException default implementation never throws this
- */
- protected void execSql(String scriptDir, String scriptFile) throws IOException, HiveMetaException {
-
- execSql(scriptDir + File.separatorChar + scriptFile);
- }
-
- // Generate the beeline args per hive conf and execute the given script
- protected void execSql(String sqlScriptFile) throws IOException {
- CommandBuilder builder = new CommandBuilder(conf, url, driver, userName, passWord, sqlScriptFile);
-
- // run the script using SqlLine
- SqlLine sqlLine = new SqlLine();
- ByteArrayOutputStream outputForLog = null;
- if (!verbose) {
- OutputStream out;
- if (LOG.isDebugEnabled()) {
- out = outputForLog = new ByteArrayOutputStream();
- } else {
- out = new NullOutputStream();
- }
- sqlLine.setOutputStream(new PrintStream(out));
- System.setProperty("sqlline.silent", "true");
- }
- LOG.info("Going to run command <" + builder.buildToLog() + ">");
- SqlLine.Status status = sqlLine.begin(builder.buildToRun(), null, false);
- if (LOG.isDebugEnabled() && outputForLog != null) {
- LOG.debug("Received following output from Sqlline:");
- LOG.debug(outputForLog.toString("UTF-8"));
- }
- if (status != SqlLine.Status.OK) {
- throw new IOException("Schema script failed, errorcode " + status);
- }
- }
-
- // Test the connection to the metastore using the configured connection properties
- protected void testConnectionToMetastore() throws HiveMetaException {
- Connection conn = getConnectionToMetastore(true);
- try {
- conn.close();
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to close metastore connection", e);
- }
- }
-
- // Quote if the database requires it
- protected String quote(String stmt) {
- stmt = stmt.replace("", needsQuotedIdentifier ? quoteCharacter : "");
- stmt = stmt.replace("", quoteCharacter);
- return stmt;
- }
-
- protected static class CommandBuilder {
- protected final String userName;
- protected final String password;
- protected final String sqlScriptFile;
- protected final String driver;
- protected final String url;
-
- protected CommandBuilder(Configuration conf, String url, String driver, String userName,
- String password, String sqlScriptFile) throws IOException {
- this.userName = userName;
- this.password = password;
- this.url = url == null ?
- HiveSchemaHelper.getValidConfVar(MetastoreConf.ConfVars.CONNECT_URL_KEY, conf) : url;
- this.driver = driver == null ?
- HiveSchemaHelper.getValidConfVar(MetastoreConf.ConfVars.CONNECTION_DRIVER, conf) : driver;
- this.sqlScriptFile = sqlScriptFile;
- }
-
- public String[] buildToRun() throws IOException {
- return argsWith(password);
- }
-
- public String buildToLog() throws IOException {
- logScript();
- return StringUtils.join(argsWith(PASSWD_MASK), " ");
- }
-
- protected String[] argsWith(String password) throws IOException {
- return new String[]
- {
- "-u", url,
- "-d", driver,
- "-n", userName,
- "-p", password,
- "--isolation=TRANSACTION_READ_COMMITTED",
- "-f", sqlScriptFile
- };
- }
-
- private void logScript() throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Going to invoke file that contains:");
- try (BufferedReader reader = new BufferedReader(new FileReader(sqlScriptFile))) {
- String line;
- while ((line = reader.readLine()) != null) {
- LOG.debug("script: " + line);
- }
- }
- }
- }
- }
-
- // Log the error message and echo it to stderr
- private static void logAndPrintToError(String errmsg) {
- LOG.error(errmsg);
- System.err.println(errmsg);
- }
-
- public static void main(String[] args) {
- MetastoreSchemaTool tool = new MetastoreSchemaTool();
- System.exit(tool.run(args));
- }
-
- public int run(String[] args) {
- return run(findHomeDir(), args, null, MetastoreConf.newMetastoreConf());
- }
-
- public int run(String metastoreHome, String[] args, OptionGroup additionalOptions,
- Configuration conf) {
- try {
- init(metastoreHome, args, additionalOptions, conf);
- SchemaToolTask task;
- if (cmdLine.hasOption("info")) {
- task = new SchemaToolTaskInfo();
- } else if (cmdLine.hasOption("upgradeSchema") || cmdLine.hasOption("upgradeSchemaFrom")) {
- task = new SchemaToolTaskUpgrade();
- } else if (cmdLine.hasOption("initSchema") || cmdLine.hasOption("initSchemaTo")) {
- task = new SchemaToolTaskInit();
- } else if (cmdLine.hasOption("initOrUpgradeSchema")) {
- task = new SchemaToolTaskInitOrUpgrade();
- } else if (cmdLine.hasOption("validate")) {
- task = new SchemaToolTaskValidate();
- } else if (cmdLine.hasOption("createCatalog")) {
- task = new SchemaToolTaskCreateCatalog();
- } else if (cmdLine.hasOption("alterCatalog")) {
- task = new SchemaToolTaskAlterCatalog();
- } else if (cmdLine.hasOption("moveDatabase")) {
- task = new SchemaToolTaskMoveDatabase();
- } else if (cmdLine.hasOption("moveTable")) {
- task = new SchemaToolTaskMoveTable();
- } else if (cmdLine.hasOption("createUser")) {
- task = new SchemaToolTaskCreateUser();
- } else {
- throw new HiveMetaException("No task defined!");
- }
-
- task.setHiveSchemaTool(this);
- task.setCommandLineArguments(cmdLine);
- task.execute();
- return 0;
- } catch (HiveMetaException e) {
- logAndPrintToError(e.getMessage());
- if (e.getCause() != null) {
- Throwable t = e.getCause();
- logAndPrintToError("Underlying cause: "
- + t.getClass().getName() + " : "
- + t.getMessage());
- if (e.getCause() instanceof SQLException) {
- logAndPrintToError("SQL Error code: " + ((SQLException) t).getErrorCode());
- }
- }
- if (cmdLine.hasOption("verbose")) {
- e.printStackTrace();
- } else {
- logAndPrintToError("Use --verbose for detailed stacktrace.");
- }
- logAndPrintToError("*** schemaTool failed ***");
- return 1;
-
- }
- }
-}
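
The quote() helper above feeds every SQL template in the task classes that follow: <q> markers become the quote character only when the backend needs quoted identifiers (Postgres), while <qa> markers are always quoted (reserved words such as DESC). A self-contained sketch of that substitution, assuming my reconstruction of the stripped <q>/<qa> placeholder tokens is right:

    public class QuoteDemo {
      // Mirrors MetastoreSchemaTool.quote() with the state passed explicitly.
      static String quote(String stmt, boolean needsQuotedIdentifier, String quoteCharacter) {
        stmt = stmt.replace("<q>", needsQuotedIdentifier ? quoteCharacter : "");
        stmt = stmt.replace("<qa>", quoteCharacter);
        return stmt;
      }

      public static void main(String[] args) {
        String tpl = "select <q>NAME<q> from <q>CTLGS<q> where <qa>DESC<qa> = '%s'";
        System.out.println(quote(tpl, true, "\""));  // Postgres: "NAME", "CTLGS", "DESC"
        System.out.println(quote(tpl, false, "`"));  // MySQL: <q> dropped, `DESC` kept
      }
    }
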
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolCommandLine.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolCommandLine.java
deleted file mode 100644
index d2d0e66..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolCommandLine.java
+++ /dev/null
@@ -1,310 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
-
-import com.google.common.collect.ImmutableSet;
-
-import java.util.Set;
-
-public class SchemaToolCommandLine {
- private final Options cmdLineOptions;
-
- @SuppressWarnings("static-access")
- private Options createOptions(OptionGroup additionalOptions) {
- Option help = new Option("help", "print this message");
- Option infoOpt = new Option("info", "Show config and schema details");
- Option upgradeOpt = new Option("upgradeSchema", "Schema upgrade");
- Option upgradeFromOpt = OptionBuilder.withArgName("upgradeFrom").hasArg()
- .withDescription("Schema upgrade from a version")
- .create("upgradeSchemaFrom");
- Option initOpt = new Option("initSchema", "Schema initialization");
- Option initToOpt = OptionBuilder.withArgName("initTo").hasArg()
- .withDescription("Schema initialization to a version")
- .create("initSchemaTo");
- Option initOrUpgradeSchemaOpt = new Option("initOrUpgradeSchema", "Initialize or upgrade schema to latest version");
- Option validateOpt = new Option("validate", "Validate the database");
- Option createCatalog = OptionBuilder
- .hasArg()
- .withDescription("Create a catalog, requires --catalogLocation parameter as well")
- .create("createCatalog");
- Option alterCatalog = OptionBuilder
- .hasArg()
- .withDescription("Alter a catalog, requires --catalogLocation and/or --catalogDescription parameter as well")
- .create("alterCatalog");
- Option moveDatabase = OptionBuilder
- .hasArg()
- .withDescription("Move a database between catalogs. Argument is the database name. " +
- "Requires --fromCatalog and --toCatalog parameters as well")
- .create("moveDatabase");
- Option moveTable = OptionBuilder
- .hasArg()
- .withDescription("Move a table to a different database. Argument is the table name. " +
- "Requires --fromCatalog, --toCatalog, --fromDatabase, and --toDatabase " +
- " parameters as well.")
- .create("moveTable");
- Option createUserOpt = new Option("createUser", "Create the Hive user; set hiveUser to the db" +
- " admin user and hivePassword to the db admin password when using this");
-
- OptionGroup optGroup = new OptionGroup();
- optGroup
- .addOption(help)
- .addOption(infoOpt)
- .addOption(upgradeOpt)
- .addOption(upgradeFromOpt)
- .addOption(initOpt)
- .addOption(initToOpt)
- .addOption(initOrUpgradeSchemaOpt)
- .addOption(validateOpt)
- .addOption(createCatalog)
- .addOption(alterCatalog)
- .addOption(moveDatabase)
- .addOption(moveTable)
- .addOption(createUserOpt);
- optGroup.setRequired(true);
-
- Option userNameOpt = OptionBuilder.withArgName("user")
- .hasArgs()
- .withDescription("Override config file user name")
- .create("userName");
- Option passwdOpt = OptionBuilder.withArgName("password")
- .hasArgs()
- .withDescription("Override config file password")
- .create("passWord");
- Option dbTypeOpt = OptionBuilder.withArgName("databaseType")
- .hasArgs().withDescription("Metastore database type").isRequired()
- .create("dbType");
- Option hiveUserOpt = OptionBuilder
- .hasArg()
- .withDescription("Hive user (for use with createUser)")
- .create("hiveUser");
- Option hivePasswdOpt = OptionBuilder
- .hasArg()
- .withDescription("Hive password (for use with createUser)")
- .create("hivePassword");
- Option hiveDbOpt = OptionBuilder
- .hasArg()
- .withDescription("Hive database (for use with createUser)")
- .create("hiveDb");
- /*
- Option metaDbTypeOpt = OptionBuilder.withArgName("metaDatabaseType")
- .hasArgs().withDescription("Used only if upgrading the system catalog for hive")
- .create("metaDbType");
- */
- Option urlOpt = OptionBuilder.withArgName("url")
- .hasArgs().withDescription("connection url to the database")
- .create("url");
- Option driverOpt = OptionBuilder.withArgName("driver")
- .hasArgs().withDescription("driver name for connection")
- .create("driver");
- Option dbOpts = OptionBuilder.withArgName("databaseOpts")
- .hasArgs().withDescription("Backend DB specific options")
- .create("dbOpts");
- Option dryRunOpt = new Option("dryRun", "list SQL scripts (no execute)");
- Option verboseOpt = new Option("verbose", "print SQL statements as they are executed");
- Option serversOpt = OptionBuilder.withArgName("serverList")
- .hasArgs().withDescription("a comma-separated list of servers used in location validation in the format of " +
- "scheme://authority (e.g. hdfs://localhost:8000)")
- .create("servers");
- Option catalogLocation = OptionBuilder
- .hasArg()
- .withDescription("Location of new catalog, required when adding a catalog")
- .create("catalogLocation");
- Option catalogDescription = OptionBuilder
- .hasArg()
- .withDescription("Description of new catalog")
- .create("catalogDescription");
- Option ifNotExists = OptionBuilder
- .withDescription("If passed then it is not an error to create an existing catalog")
- .create("ifNotExists");
- Option fromCatalog = OptionBuilder
- .hasArg()
- .withDescription("Catalog a moving database or table is coming from. This is " +
- "required if you are moving a database or table.")
- .create("fromCatalog");
- Option toCatalog = OptionBuilder
- .hasArg()
- .withDescription("Catalog a moving database or table is going to. This is " +
- "required if you are moving a database or table.")
- .create("toCatalog");
- Option fromDatabase = OptionBuilder
- .hasArg()
- .withDescription("Database a moving table is coming from. This is " +
- "required if you are moving a table.")
- .create("fromDatabase");
- Option toDatabase = OptionBuilder
- .hasArg()
- .withDescription("Database a moving table is going to. This is " +
- "required if you are moving a table.")
- .create("toDatabase");
-
- Options options = new Options();
- options.addOption(help);
- options.addOptionGroup(optGroup);
- options.addOption(dbTypeOpt);
- //options.addOption(metaDbTypeOpt);
- options.addOption(userNameOpt);
- options.addOption(passwdOpt);
- options.addOption(urlOpt);
- options.addOption(driverOpt);
- options.addOption(dbOpts);
- options.addOption(dryRunOpt);
- options.addOption(verboseOpt);
- options.addOption(serversOpt);
- options.addOption(catalogLocation);
- options.addOption(catalogDescription);
- options.addOption(ifNotExists);
- options.addOption(fromCatalog);
- options.addOption(toCatalog);
- options.addOption(fromDatabase);
- options.addOption(toDatabase);
- options.addOption(hiveUserOpt);
- options.addOption(hivePasswdOpt);
- options.addOption(hiveDbOpt);
- if (additionalOptions != null) options.addOptionGroup(additionalOptions);
-
- return options;
- }
-
- private final CommandLine cl;
- private final String dbType;
- private final String metaDbType;
-
- public SchemaToolCommandLine(String[] args, OptionGroup additionalOptions) throws ParseException {
- cmdLineOptions = createOptions(additionalOptions);
- cl = getCommandLine(args);
- if (cl.hasOption("help")) {
- printAndExit(null);
- }
-
- dbType = cl.getOptionValue("dbType");
- metaDbType = cl.getOptionValue("metaDbType");
-
- validate();
- }
-
- private CommandLine getCommandLine(String[] args) throws ParseException {
- try {
- CommandLineParser parser = new GnuParser();
- return parser.parse(cmdLineOptions, args);
- } catch (ParseException e) {
- printAndExit("HiveSchemaTool:Parsing failed. Reason: " + e.getLocalizedMessage());
- return null;
- }
- }
-
- private static final Set<String> VALID_DB_TYPES = ImmutableSet.of(HiveSchemaHelper.DB_DERBY,
- HiveSchemaHelper.DB_HIVE, HiveSchemaHelper.DB_MSSQL, HiveSchemaHelper.DB_MYSQL,
- HiveSchemaHelper.DB_POSTGRACE, HiveSchemaHelper.DB_ORACLE);
-
- private static final Set<String> VALID_META_DB_TYPES = ImmutableSet.of(HiveSchemaHelper.DB_DERBY,
- HiveSchemaHelper.DB_MSSQL, HiveSchemaHelper.DB_MYSQL, HiveSchemaHelper.DB_POSTGRACE,
- HiveSchemaHelper.DB_ORACLE);
-
- private void validate() throws ParseException {
- if (!VALID_DB_TYPES.contains(dbType)) {
- printAndExit("Unsupported dbType " + dbType);
- }
-
- if (metaDbType != null) {
- if (!dbType.equals(HiveSchemaHelper.DB_HIVE)) {
- printAndExit("metaDbType may only be set if dbType is hive");
- }
- if (!VALID_META_DB_TYPES.contains(metaDbType)) {
- printAndExit("Unsupported metaDbType " + metaDbType);
- }
- } else if (dbType.equalsIgnoreCase(HiveSchemaHelper.DB_HIVE)) {
- System.err.println();
- printAndExit("metaDbType must be set if dbType is hive");
- }
-
- if ((cl.hasOption("createCatalog")) && !cl.hasOption("catalogLocation")) {
- System.err.println();
- printAndExit("catalogLocation must be set for createCatalog");
- }
-
- if (!cl.hasOption("createCatalog") && !cl.hasOption("alterCatalog") &&
- (cl.hasOption("catalogLocation") || cl.hasOption("catalogDescription"))) {
- printAndExit("catalogLocation and catalogDescription may be set only for createCatalog and alterCatalog");
- }
-
- if (!cl.hasOption("createCatalog") && cl.hasOption("ifNotExists")) {
- printAndExit("ifNotExists may be set only for createCatalog");
- }
-
- if (cl.hasOption("moveDatabase") &&
- (!cl.hasOption("fromCatalog") || !cl.hasOption("toCatalog"))) {
- printAndExit("fromCatalog and toCatalog must be set for moveDatabase");
- }
-
- if (cl.hasOption("moveTable") &&
- (!cl.hasOption("fromCatalog") || !cl.hasOption("toCatalog") ||
- !cl.hasOption("fromDatabase") || !cl.hasOption("toDatabase"))) {
- printAndExit("fromCatalog, toCatalog, fromDatabase and toDatabase must be set for moveTable");
- }
-
- if ((!cl.hasOption("moveDatabase") && !cl.hasOption("moveTable")) &&
- (cl.hasOption("fromCatalog") || cl.hasOption("toCatalog"))) {
- printAndExit("fromCatalog and toCatalog may be set only for moveDatabase and moveTable");
- }
-
- if (!cl.hasOption("moveTable") &&
- (cl.hasOption("fromDatabase") || cl.hasOption("toDatabase"))) {
- printAndExit("fromDatabase and toDatabase may be set only for moveTable");
- }
- }
-
- private void printAndExit(String reason) throws ParseException {
- if (reason != null) {
- System.err.println(reason);
- }
- HelpFormatter formatter = new HelpFormatter();
- formatter.printHelp("schemaTool", cmdLineOptions);
- if (reason != null) {
- throw new ParseException(reason);
- } else {
- System.exit(0);
- }
- }
-
- public String getDbType() {
- return dbType;
- }
-
- public String getMetaDbType() {
- return metaDbType;
- }
-
- boolean hasOption(String opt) {
- return cl.hasOption(opt);
- }
-
- String getOptionValue(String opt) {
- return cl.getOptionValue(opt);
- }
-}
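
Parsing above is GNU-style: exactly one task option from the required option group, plus the always-required -dbType. A minimal construction sketch; it sits in the same package because hasOption() is package-private, and the argument values are illustrative:

    package org.apache.hadoop.hive.metastore.tools;

    import org.apache.commons.cli.ParseException;

    public class CommandLineDemo {
      public static void main(String[] args) throws ParseException {
        // One task option (-initSchema) plus the required -dbType.
        String[] argv = {"-initSchema", "-dbType", "derby", "-dryRun"};
        SchemaToolCommandLine cl = new SchemaToolCommandLine(argv, null);
        System.out.println(cl.getDbType());          // derby
        System.out.println(cl.hasOption("dryRun"));  // true
      }
    }
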
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTask.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTask.java
deleted file mode 100644
index 87ef6b5..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTask.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-
-public abstract class SchemaToolTask {
- protected MetastoreSchemaTool schemaTool;
-
- void setHiveSchemaTool(MetastoreSchemaTool schemaTool) {
- this.schemaTool = schemaTool;
- }
-
- abstract void setCommandLineArguments(SchemaToolCommandLine cl);
-
- abstract void execute() throws HiveMetaException;
-}
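
SchemaToolTask is the template-method seam that the run() dispatch in MetastoreSchemaTool keys off: the tool injects itself and the parsed command line, then calls execute(). A hypothetical minimal task, sketched in the same package because the abstract hooks are package-private:

    package org.apache.hadoop.hive.metastore.tools;

    import org.apache.hadoop.hive.metastore.HiveMetaException;

    // Hypothetical task: just proves connectivity and exits.
    class SchemaToolTaskPing extends SchemaToolTask {
      @Override
      void setCommandLineArguments(SchemaToolCommandLine cl) {
        // no task-specific options to read
      }

      @Override
      void execute() throws HiveMetaException {
        schemaTool.testConnectionToMetastore();  // shared connectivity check
        System.out.println("Metastore connection OK");
      }
    }
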
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskAlterCatalog.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskAlterCatalog.java
deleted file mode 100644
index 4e3b3d1..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskAlterCatalog.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Alter an existing catalog.
- */
-class SchemaToolTaskAlterCatalog extends SchemaToolTask {
- private static final Logger LOG = LoggerFactory.getLogger(SchemaToolTaskAlterCatalog.class.getName());
-
- private String catName;
- private String location;
- private String description;
-
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- catName = normalizeIdentifier(cl.getOptionValue("alterCatalog"));
- location = cl.getOptionValue("catalogLocation");
- description = cl.getOptionValue("catalogDescription");
- }
-
- private static final String UPDATE_CATALOG_STMT =
- "update CTLGS " +
- " set LOCATION_URI = %s, " +
- " DESC = %s " +
- " where NAME = '%s'";
-
- @Override
- void execute() throws HiveMetaException {
- if (location == null && description == null) {
- throw new HiveMetaException("Asked to update catalog " + catName + " but not given any changes to update");
- }
- System.out.println("Updating catalog " + catName);
-
- Connection conn = schemaTool.getConnectionToMetastore(true);
- boolean success = false;
- try {
- conn.setAutoCommit(false);
- try (Statement stmt = conn.createStatement()) {
- Object updateLocation = location == null ? schemaTool.quote("<q>LOCATION_URI<q>") : "'" + location + "'";
- Object updateDescription = description == null ? schemaTool.quote("<qa>DESC<qa>") : "'" + description + "'";
- String update = String.format(schemaTool.quote(UPDATE_CATALOG_STMT), updateLocation, updateDescription,
- catName);
- LOG.debug("Going to run " + update);
- int count = stmt.executeUpdate(update);
- if (count != 1) {
- throw new HiveMetaException("Failed to find catalog " + catName + " to update");
- }
- conn.commit();
- success = true;
- }
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to update catalog", e);
- } finally {
- try {
- if (!success) {
- conn.rollback();
- }
- } catch (SQLException e) {
- // Not really much we can do here.
- LOG.error("Failed to rollback, everything will probably go bad from here.", e);
- }
- }
- }
-}
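
The "%s set to the quoted column name" trick above is how a description-only (or location-only) alter leaves the other column untouched: the column is assigned to itself. A standalone rendering sketch reusing the quote substitution reconstructed earlier (MySQL-style backticks, illustrative values):

    public class AlterCatalogSqlDemo {
      static String quote(String stmt, boolean needsQuotedIdentifier, String q) {
        return stmt.replace("<q>", needsQuotedIdentifier ? q : "").replace("<qa>", q);
      }

      public static void main(String[] args) {
        String tpl = "update <q>CTLGS<q> set <q>LOCATION_URI<q> = %s, <qa>DESC<qa> = %s where <q>NAME<q> = '%s'";
        boolean needsQuoted = false;  // MySQL; Postgres would pass true
        String q = "`";
        String keepLocation = quote("<q>LOCATION_URI<q>", needsQuoted, q);  // column = itself
        String sql = String.format(quote(tpl, needsQuoted, q),
            keepLocation, "'updated description'", "cat1");
        // update CTLGS set LOCATION_URI = LOCATION_URI, `DESC` = 'updated description' where NAME = 'cat1'
        System.out.println(sql);
      }
    }
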
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskCreateCatalog.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskCreateCatalog.java
deleted file mode 100644
index 7857970..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskCreateCatalog.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier;
-
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Create a new catalog.
- */
-class SchemaToolTaskCreateCatalog extends SchemaToolTask {
- private static final Logger LOG = LoggerFactory.getLogger(SchemaToolTaskCreateCatalog.class.getName());
-
- private String catName;
- private String location;
- private String description;
- private boolean ifNotExists;
-
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- catName = normalizeIdentifier(cl.getOptionValue("createCatalog"));
- location = cl.getOptionValue("catalogLocation");
- description = cl.getOptionValue("catalogDescription");
- ifNotExists = cl.hasOption("ifNotExists");
- }
-
- @Override
- void execute() throws HiveMetaException {
- System.out.println("Create catalog " + catName + " at location " + location);
-
- Connection conn = schemaTool.getConnectionToMetastore(true);
- boolean success = false;
- try {
- conn.setAutoCommit(false);
- try (Statement stmt = conn.createStatement()) {
- // If they set ifNotExists, check for existence first, and bail if it exists. This is
- // more reliable than attempting to parse the error message from the SQLException.
- if (ifNotExists && catalogExists(stmt)) {
- return;
- }
-
- int catNum = getNextCatalogId(stmt);
- addCatalog(conn, stmt, catNum);
- success = true;
- }
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to add catalog", e);
- } finally {
- try {
- if (!success) {
- conn.rollback();
- }
- } catch (SQLException e) {
- // Not really much we can do here.
- LOG.error("Failed to rollback, everything will probably go bad from here.", e);
- }
- }
- }
-
- private static final String CATALOG_EXISTS_QUERY =
- "select NAME " +
- " from CTLGS " +
- " where NAME = '%s'";
-
- private boolean catalogExists(Statement stmt) throws SQLException {
- String query = String.format(schemaTool.quote(CATALOG_EXISTS_QUERY), catName);
- LOG.debug("Going to run " + query);
- try (ResultSet rs = stmt.executeQuery(query)) {
- if (rs.next()) {
- System.out.println("Catalog " + catName + " already exists");
- return true;
- }
- }
-
- return false;
- }
-
- private static final String NEXT_CATALOG_ID_QUERY =
- "select max(CTLG_ID) " +
- " from CTLGS";
-
- private int getNextCatalogId(Statement stmt) throws SQLException, HiveMetaException {
- String query = schemaTool.quote(NEXT_CATALOG_ID_QUERY);
- LOG.debug("Going to run " + query);
- try (ResultSet rs = stmt.executeQuery(query)) {
- if (!rs.next()) {
- throw new HiveMetaException("No catalogs found, have you upgraded the database?");
- }
- int nextId = rs.getInt(1) + 1;
- // We need to stay out of the way of any sequences used by the underlying database.
- // Otherwise the next time the client tries to add a catalog we'll get an error.
- // There should never be billions of catalogs, so we'll shift our sequence number up
- // there to avoid clashes.
- int floor = 1 << 30;
- return Math.max(nextId, floor);
- }
- }
-
- private static final String ADD_CATALOG_STMT =
- "insert into CTLGS (CTLG_ID, NAME, DESC, LOCATION_URI) " +
- " values (%d, '%s', '%s', '%s')";
-
- private void addCatalog(Connection conn, Statement stmt, int catNum) throws SQLException {
- String update = String.format(schemaTool.quote(ADD_CATALOG_STMT), catNum, catName, description, location);
- LOG.debug("Going to run " + update);
- stmt.execute(update);
- conn.commit();
- }
-}
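
The 1 << 30 floor in getNextCatalogId() is the subtle part: hand-allocated CTLG_ID values are pushed well above anything a backend auto-increment sequence will hand out, so later sequence-driven inserts cannot collide with schematool's. The arithmetic in isolation:

    public class CatalogIdFloorDemo {
      public static void main(String[] args) {
        final int floor = 1 << 30;  // 1,073,741,824
        // Fresh metastore, max(CTLG_ID) is small: jump straight to the floor.
        System.out.println(Math.max(3 + 1, floor));       // 1073741824
        // Catalogs already allocated above the floor: keep counting upward.
        System.out.println(Math.max(floor + 41, floor));  // 1073741865
      }
    }
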
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskCreateUser.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskCreateUser.java
deleted file mode 100644
index a9ad005..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskCreateUser.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-public class SchemaToolTaskCreateUser extends SchemaToolTask {
- private static final Logger LOG = LoggerFactory.getLogger(SchemaToolTaskCreateUser.class);
-
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
-
- }
-
- @Override
- void execute() throws HiveMetaException {
- schemaTool.testConnectionToMetastore();
- System.out.println("Starting user creation");
-
- String scriptDir = schemaTool.getMetaStoreSchemaInfo().getMetaStoreScriptDir();
- String protoCreateFile = schemaTool.getMetaStoreSchemaInfo().getCreateUserScript();
-
- try {
- File createFile = subUserAndPassword(scriptDir, protoCreateFile);
- System.out.println("Creation script " + createFile.getAbsolutePath());
- if (!schemaTool.isDryRun()) {
- if ("oracle".equals(schemaTool.getDbType())) oracleCreateUserHack(createFile);
- else schemaTool.execSql(createFile.getParent(), createFile.getName());
- System.out.println("User creation completed");
- }
- } catch (IOException e) {
- throw new HiveMetaException("User creation FAILED!" +
- " Metastore unusable !!", e);
- }
-
- }
-
- private File subUserAndPassword(String parent, String filename) throws IOException {
- File createFile = File.createTempFile("create-hive-user-" + schemaTool.getDbType(), ".sql");
- BufferedWriter writer = new BufferedWriter(new FileWriter(createFile));
- File proto = new File(parent, filename);
- BufferedReader reader = new BufferedReader(new FileReader(proto));
- reader.lines()
- .map(s -> s.replace("_REPLACE_WITH_USER_", schemaTool.getHiveUser())
- .replace("_REPLACE_WITH_PASSWD_", schemaTool.getHivePasswd())
- .replace("_REPLACE_WITH_DB_", schemaTool.getHiveDb()))
- .forEach(s -> {
- try {
- writer.write(s);
- writer.newLine();
- } catch (IOException e) {
- throw new RuntimeException("Unable to write to tmp file ", e);
- }
- });
- reader.close();
- writer.close();
- return createFile;
- }
-
- private void oracleCreateUserHack(File createFile) throws HiveMetaException {
- LOG.debug("Found oracle, hacking our way through it rather than using SqlLine");
- try (BufferedReader reader = new BufferedReader(new FileReader(createFile))) {
- try (Connection conn = schemaTool.getConnectionToMetastore(false)) {
- try (Statement stmt = conn.createStatement()) {
- reader.lines()
- .forEach(s -> {
- assert s.charAt(s.length() - 1) == ';';
- try {
- stmt.execute(s.substring(0, s.length() - 1));
- } catch (SQLException e) {
- LOG.error("statement <" + s.substring(0, s.length() - 2) + "> failed", e);
- throw new RuntimeException(e);
- }
- });
- }
- }
- } catch (IOException e) {
- LOG.error("Caught IOException trying to read modified create user script " +
- createFile.getAbsolutePath(), e);
- throw new HiveMetaException(e);
- } catch (HiveMetaException e) {
- LOG.error("Failed to connect to RDBMS", e);
- throw e;
- } catch (SQLException e) {
- LOG.error("Got SQLException", e);
- }
- }
-}
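
subUserAndPassword() above is plain string templating over the per-dbType create-user script. The same mapping as a standalone sketch; the proto lines here are hypothetical, the real ones ship with the metastore scripts:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class CreateUserTemplateDemo {
      public static void main(String[] args) {
        List<String> proto = Arrays.asList(
            "CREATE USER '_REPLACE_WITH_USER_'@'%' IDENTIFIED BY '_REPLACE_WITH_PASSWD_';",
            "GRANT ALL ON _REPLACE_WITH_DB_.* TO '_REPLACE_WITH_USER_'@'%';");
        List<String> rendered = proto.stream()
            .map(s -> s.replace("_REPLACE_WITH_USER_", "hive")
                .replace("_REPLACE_WITH_PASSWD_", "hivepw")
                .replace("_REPLACE_WITH_DB_", "metastore"))
            .collect(Collectors.toList());
        rendered.forEach(System.out::println);
      }
    }
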
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInfo.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInfo.java
deleted file mode 100644
index cd1d57b..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInfo.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
-
-/**
- * Print Hive version and schema version.
- */
-class SchemaToolTaskInfo extends SchemaToolTask {
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- // do nothing
- }
-
- @Override
- void execute() throws HiveMetaException {
- String hiveVersion = schemaTool.getMetaStoreSchemaInfo().getHiveSchemaVersion();
- MetaStoreConnectionInfo connectionInfo = schemaTool.getConnectionInfo(true);
- String dbVersion = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo);
-
- System.out.println("Hive distribution version:\t " + hiveVersion);
- System.out.println("Metastore schema version:\t " + dbVersion);
-
- schemaTool.assertCompatibleVersion(hiveVersion, dbVersion);
- }
-}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInit.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInit.java
deleted file mode 100644
index e3fa495..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInit.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-
-/**
- * Initialize the metastore schema.
- */
-class SchemaToolTaskInit extends SchemaToolTask {
- private boolean validate = true;
- private String toVersion;
-
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- if (cl.hasOption("initSchemaTo")) {
- this.toVersion = cl.getOptionValue("initSchemaTo");
- this.validate = false;
- }
- }
-
- private void ensureToVersion() throws HiveMetaException {
- if (toVersion != null) {
- return;
- }
-
- // If null, the current Hive version is used
- toVersion = schemaTool.getMetaStoreSchemaInfo().getHiveSchemaVersion();
- System.out.println("Initializing the schema to: " + toVersion);
- }
-
- @Override
- void execute() throws HiveMetaException {
- ensureToVersion();
-
- schemaTool.testConnectionToMetastore();
- System.out.println("Starting metastore schema initialization to " + toVersion);
-
- String initScriptDir = schemaTool.getMetaStoreSchemaInfo().getMetaStoreScriptDir();
- String initScriptFile = schemaTool.getMetaStoreSchemaInfo().generateInitFileName(toVersion);
-
- try {
- System.out.println("Initialization script " + initScriptFile);
- if (!schemaTool.isDryRun()) {
- schemaTool.execSql(initScriptDir, initScriptFile);
- System.out.println("Initialization script completed");
- }
- } catch (IOException e) {
- throw new HiveMetaException("Schema initialization FAILED! Metastore state would be inconsistent!", e);
- }
-
- if (validate) {
- schemaTool.verifySchemaVersion();
- }
- }
-}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInitOrUpgrade.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInitOrUpgrade.java
deleted file mode 100644
index 365fb36..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskInitOrUpgrade.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Perform metastore schema init or upgrade based on schema version
- */
-
-public class SchemaToolTaskInitOrUpgrade extends SchemaToolTask {
- private static final Logger LOG = LoggerFactory.getLogger(SchemaToolTaskInitOrUpgrade.class);
- private SchemaToolCommandLine cl;
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- this.cl = cl;
- }
-
- @Override
- void execute() throws HiveMetaException {
- HiveSchemaHelper.MetaStoreConnectionInfo connectionInfo = schemaTool.getConnectionInfo(true);
- String dbVersion = null;
- try {
- dbVersion = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo);
- } catch (HiveMetaException e) {
- LOG.info("Exception getting db version:" + e.getMessage());
- LOG.info("Try to initialize db schema");
- }
- SchemaToolTask task;
- if (dbVersion == null) {
- task = new SchemaToolTaskInit();
- } else {
- task = new SchemaToolTaskUpgrade();
- }
- task.setHiveSchemaTool(schemaTool);
- task.setCommandLineArguments(cl);
- task.execute();
- }
-}
\ No newline at end of file
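
The init-or-upgrade dispatch above keys entirely off whether a schema version can be read: any HiveMetaException from the version lookup is treated as "schema not initialized yet". A hypothetical driver-side wiring, mirroring the calls the deleted class makes on its delegate tasks:

// Sketch only; the surrounding driver code is assumed, not part of this patch.
static void initOrUpgrade(MetastoreSchemaTool schemaTool, SchemaToolCommandLine cl)
    throws HiveMetaException {
  SchemaToolTask task = new SchemaToolTaskInitOrUpgrade();
  task.setHiveSchemaTool(schemaTool);  // inject the configured tool
  task.setCommandLineArguments(cl);    // forwarded to the init or upgrade task
  task.execute();                      // init when no version is found, else upgrade
}
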
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskMoveDatabase.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskMoveDatabase.java
deleted file mode 100644
index 8a9b9d1..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskMoveDatabase.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Move a database from one catalog to another.
- */
-class SchemaToolTaskMoveDatabase extends SchemaToolTask {
- private static final Logger LOG = LoggerFactory.getLogger(SchemaToolTaskMoveDatabase.class.getName());
-
- private String fromCatName;
- private String toCatName;
- private String dbName;
-
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- fromCatName = normalizeIdentifier(cl.getOptionValue("fromCatalog"));
- toCatName = normalizeIdentifier(cl.getOptionValue("toCatalog"));
- dbName = normalizeIdentifier(cl.getOptionValue("moveDatabase"));
- }
-
- @Override
- void execute() throws HiveMetaException {
- System.out.println(String.format("Moving database %s from catalog %s to catalog %s",
- dbName, fromCatName, toCatName));
- Connection conn = schemaTool.getConnectionToMetastore(true);
- boolean success = false;
- try {
- conn.setAutoCommit(false);
- try (Statement stmt = conn.createStatement()) {
- updateCatalogNameInTable(stmt, "DBS", "CTLG_NAME", "NAME", fromCatName, toCatName, dbName, false);
- updateCatalogNameInTable(stmt, "TAB_COL_STATS", "CAT_NAME", "DB_NAME", fromCatName, toCatName, dbName, true);
- updateCatalogNameInTable(stmt, "PART_COL_STATS", "CAT_NAME", "DB_NAME", fromCatName, toCatName, dbName, true);
- updateCatalogNameInTable(stmt, "PARTITION_EVENTS", "CAT_NAME", "DB_NAME", fromCatName, toCatName, dbName, true);
- updateCatalogNameInTable(stmt, "NOTIFICATION_LOG", "CAT_NAME", "DB_NAME", fromCatName, toCatName, dbName, true);
- conn.commit();
- success = true;
- }
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to move database", e);
- } finally {
- try {
- if (!success) {
- conn.rollback();
- }
- } catch (SQLException e) {
- // Not really much we can do here.
- LOG.error("Failed to rollback, everything will probably go bad from here.");
- }
- }
- }
-
- private static final String UPDATE_CATALOG_NAME_STMT =
- "update %s " +
- " set %s = '%s' " +
- " where %s = '%s' " +
- " and %s = '%s'";
-
- private void updateCatalogNameInTable(Statement stmt, String tableName, String catColName, String dbColName,
- String fromCatName, String toCatName, String dbName, boolean zeroUpdatesOk)
- throws HiveMetaException, SQLException {
- String update = String.format(schemaTool.quote(UPDATE_CATALOG_NAME_STMT), tableName, catColName, toCatName,
- catColName, fromCatName, dbColName, dbName);
- LOG.debug("Going to run " + update);
- int numUpdated = stmt.executeUpdate(update);
- if (numUpdated != 1 && !(zeroUpdatesOk && numUpdated == 0)) {
- throw new HiveMetaException("Failed to properly update the " + tableName +
- " table. Expected to update 1 row but instead updated " + numUpdated);
- }
- }
-}
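
The catalog-name updates above splice identifiers and values into the SQL text with String.format. JDBC cannot bind identifiers, but the values could be bound as parameters; a hedged sketch of the same per-table update written that way (the schemaTool.quote() identifier handling is omitted here):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

class CatalogNameUpdater {
  private static final String UPDATE_CATALOG_NAME_PARAM =
      "update %s set %s = ? where %s = ? and %s = ?";

  // Table and column names still have to be formatted in; only values are bound.
  static int updateCatalogName(Connection conn, String table, String catCol, String dbCol,
      String fromCat, String toCat, String dbName) throws SQLException {
    String sql = String.format(UPDATE_CATALOG_NAME_PARAM, table, catCol, catCol, dbCol);
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, toCat);    // new catalog name
      ps.setString(2, fromCat);  // old catalog name
      ps.setString(3, dbName);   // database being moved
      return ps.executeUpdate();
    }
  }
}
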
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskMoveTable.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskMoveTable.java
deleted file mode 100644
index a8f9228..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskMoveTable.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier;
-
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Move a table from one catalog/database to another.
- */
-class SchemaToolTaskMoveTable extends SchemaToolTask {
- private static final Logger LOG = LoggerFactory.getLogger(SchemaToolTaskMoveTable.class.getName());
-
- private String fromCat;
- private String toCat;
- private String fromDb;
- private String toDb;
- private String tableName;
-
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- fromCat = normalizeIdentifier(cl.getOptionValue("fromCatalog"));
- toCat = normalizeIdentifier(cl.getOptionValue("toCatalog"));
- fromDb = normalizeIdentifier(cl.getOptionValue("fromDatabase"));
- toDb = normalizeIdentifier(cl.getOptionValue("toDatabase"));
- tableName = normalizeIdentifier(cl.getOptionValue("moveTable"));
- }
-
- @Override
- void execute() throws HiveMetaException {
- Connection conn = schemaTool.getConnectionToMetastore(true);
- boolean success = false;
- try {
- conn.setAutoCommit(false);
- try (Statement stmt = conn.createStatement()) {
- updateTableId(stmt);
- updateDbNameForTable(stmt, "TAB_COL_STATS", "TABLE_NAME", fromCat, toCat, fromDb, toDb, tableName);
- updateDbNameForTable(stmt, "PART_COL_STATS", "TABLE_NAME", fromCat, toCat, fromDb, toDb, tableName);
- updateDbNameForTable(stmt, "PARTITION_EVENTS", "TBL_NAME", fromCat, toCat, fromDb, toDb, tableName);
- updateDbNameForTable(stmt, "NOTIFICATION_LOG", "TBL_NAME", fromCat, toCat, fromDb, toDb, tableName);
- conn.commit();
- success = true;
- }
- } catch (SQLException se) {
- throw new HiveMetaException("Failed to move table", se);
- } finally {
- try {
- if (!success) {
- conn.rollback();
- }
- } catch (SQLException e) {
- // Not really much we can do here.
- LOG.error("Failed to rollback, everything will probably go bad from here.");
- }
-
- }
- }
-
- private static final String UPDATE_TABLE_ID_STMT =
- "update TBLS " +
- " set DB_ID = %d " +
- " where DB_ID = %d " +
- " and TBL_NAME = '%s'";
-
- private void updateTableId(Statement stmt) throws SQLException, HiveMetaException {
- // Find the old database id
- long oldDbId = getDbId(stmt, fromDb, fromCat);
-
- // Find the new database id
- long newDbId = getDbId(stmt, toDb, toCat);
-
- String update = String.format(schemaTool.quote(UPDATE_TABLE_ID_STMT), newDbId, oldDbId, tableName);
- LOG.debug("Going to run " + update);
- int numUpdated = stmt.executeUpdate(update);
- if (numUpdated != 1) {
- throw new HiveMetaException(
- "Failed to properly update TBLS table. Expected to update " +
- "1 row but instead updated " + numUpdated);
- }
- }
-
- private static final String DB_ID_QUERY =
- "select DB_ID " +
- " from DBS " +
- " where NAME = '%s' " +
- " and CTLG_NAME = '%s'";
-
- private long getDbId(Statement stmt, String db, String catalog) throws SQLException, HiveMetaException {
- String query = String.format(schemaTool.quote(DB_ID_QUERY), db, catalog);
- LOG.debug("Going to run " + query);
- try (ResultSet rs = stmt.executeQuery(query)) {
- if (!rs.next()) {
- throw new HiveMetaException("Unable to find database " + fromDb);
- }
- return rs.getLong(1);
- }
- }
-
- private static final String UPDATE_DB_NAME_STMT =
- "update %s " +
- " set CAT_NAME = '%s', " +
- " DB_NAME = '%s' " +
- " where CAT_NAME = '%s' " +
- " and DB_NAME = '%s' " +
- " and %s = '%s'";
-
- private void updateDbNameForTable(Statement stmt, String tableName, String tableColumnName, String fromCat,
- String toCat, String fromDb, String toDb, String hiveTblName) throws HiveMetaException, SQLException {
- String update = String.format(schemaTool.quote(UPDATE_DB_NAME_STMT), tableName, toCat, toDb, fromCat, fromDb,
- tableColumnName, hiveTblName);
-
- LOG.debug("Going to run " + update);
- int numUpdated = stmt.executeUpdate(update);
- if (numUpdated > 1 || numUpdated < 0) {
- throw new HiveMetaException("Failed to properly update the " + tableName +
- " table. Expected to update 1 row but instead updated " + numUpdated);
- }
- }
-}
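
Both move tasks wrap their updates in the same manual transaction idiom: disable auto-commit, apply every update, commit, and roll back from the finally block if anything failed. Distilled into one helper (a sketch; the SqlWork interface is invented here for illustration):

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.hadoop.hive.metastore.HiveMetaException;

interface SqlWork {
  void run(Statement stmt) throws SQLException, HiveMetaException;
}

class Transactions {
  static void inTransaction(Connection conn, String opName, SqlWork work) throws HiveMetaException {
    boolean success = false;
    try {
      conn.setAutoCommit(false);
      try (Statement stmt = conn.createStatement()) {
        work.run(stmt);          // all updates run on one connection
        conn.commit();
        success = true;
      }
    } catch (SQLException e) {
      throw new HiveMetaException("Failed to " + opName, e);
    } finally {
      try {
        if (!success) {
          conn.rollback();       // undo partial updates on any failure
        }
      } catch (SQLException e) {
        // Nothing more can be done; the connection state is unknown.
      }
    }
  }
}
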
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskUpgrade.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskUpgrade.java
deleted file mode 100644
index 5e71609..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskUpgrade.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
-
-/**
- * Perform metastore schema upgrade.
- */
-class SchemaToolTaskUpgrade extends SchemaToolTask {
- private String fromVersion;
-
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- if (cl.hasOption("upgradeSchemaFrom")) {
- this.fromVersion = cl.getOptionValue("upgradeSchemaFrom");
- }
- }
-
- private void ensureFromVersion() throws HiveMetaException {
- if (fromVersion != null) {
- return;
- }
-
- // If null, then read from the metastore
- MetaStoreConnectionInfo connectionInfo = schemaTool.getConnectionInfo(false);
- fromVersion = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo);
- if (fromVersion == null || fromVersion.isEmpty()) {
- throw new HiveMetaException("Schema version not stored in the metastore. " +
- "Metastore schema is too old or corrupt. Try specifying the version manually");
- }
- System.out.println("Upgrading from the version " + fromVersion);
- }
-
- @Override
- void execute() throws HiveMetaException {
- ensureFromVersion();
-
- if (schemaTool.getMetaStoreSchemaInfo().getHiveSchemaVersion().equals(fromVersion)) {
- System.out.println("No schema upgrade required from version " + fromVersion);
- return;
- }
-
- // Find the list of scripts to execute for this upgrade
- List<String> upgradeScripts = schemaTool.getMetaStoreSchemaInfo().getUpgradeScripts(fromVersion);
- schemaTool.testConnectionToMetastore();
- System.out.println("Starting upgrade metastore schema from version " + fromVersion + " to " +
- schemaTool.getMetaStoreSchemaInfo().getHiveSchemaVersion());
- String scriptDir = schemaTool.getMetaStoreSchemaInfo().getMetaStoreScriptDir();
- try {
- for (String scriptFile : upgradeScripts) {
- System.out.println("Upgrade script " + scriptFile);
- if (!schemaTool.isDryRun()) {
- runPreUpgrade(scriptDir, scriptFile);
- schemaTool.execSql(scriptDir, scriptFile);
- System.out.println("Completed " + scriptFile);
- }
- }
- } catch (IOException e) {
- throw new HiveMetaException("Upgrade FAILED! Metastore state would be inconsistent !!", e);
- }
-
- // Re-validate the new version after the upgrade
- schemaTool.verifySchemaVersion();
- }
-
- /**
- * Run pre-upgrade scripts corresponding to a given upgrade script,
- * if any exist. The errors from pre-upgrade are ignored.
- * Pre-upgrade scripts typically contain setup statements which
- * may fail on some database versions and failure is ignorable.
- *
- * @param scriptDir upgrade script directory name
- * @param scriptFile upgrade script file name
- */
- private void runPreUpgrade(String scriptDir, String scriptFile) {
- for (int i = 0;; i++) {
- String preUpgradeScript = schemaTool.getMetaStoreSchemaInfo().getPreUpgradeScriptName(i, scriptFile);
- File preUpgradeScriptFile = new File(scriptDir, preUpgradeScript);
- if (!preUpgradeScriptFile.isFile()) {
- break;
- }
-
- try {
- schemaTool.execSql(scriptDir, preUpgradeScript);
- System.out.println("Completed " + preUpgradeScript);
- } catch (Exception e) {
- // Ignore the pre-upgrade script errors
- System.err.println("Warning in pre-upgrade script " + preUpgradeScript + ": " + e.getMessage());
- if (schemaTool.isVerbose()) {
- e.printStackTrace();
- }
- }
- }
- }
-}
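
The runPreUpgrade() loop above probes pre-upgrade scripts by index until the first missing file. Assuming the "pre-<n>-<script>" naming that getPreUpgradeScriptName() implies (an assumption; the real naming comes from MetaStoreSchemaInfo), the same discovery can be written as:

import java.io.File;
import java.util.ArrayList;
import java.util.List;

class PreUpgradeScripts {
  // Sketch only: the "pre-" + i + "-" + script naming is assumed here.
  static List<String> find(String scriptDir, String scriptFile) {
    List<String> found = new ArrayList<>();
    for (int i = 0;; i++) {
      File candidate = new File(scriptDir, "pre-" + i + "-" + scriptFile);
      if (!candidate.isFile()) {
        break;                  // the first missing index ends the sequence
      }
      found.add(candidate.getName());
    }
    return found;
  }
}
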
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskValidate.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskValidate.java
deleted file mode 100644
index d86b457..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/SchemaToolTaskValidate.java
+++ /dev/null
@@ -1,630 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.net.URI;
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang3.tuple.Pair;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableMap;
-
-/**
- * Validate the metastore schema and its contents.
- */
-class SchemaToolTaskValidate extends SchemaToolTask {
- private static final Logger LOG = LoggerFactory.getLogger(SchemaToolTaskValidate.class.getName());
-
- @Override
- void setCommandLineArguments(SchemaToolCommandLine cl) {
- // do nothing
- }
-
- @Override
- void execute() throws HiveMetaException {
- System.out.println("Starting metastore validation\n");
- Connection conn = schemaTool.getConnectionToMetastore(false);
- boolean success = true;
- try {
- success &= validateSchemaVersions();
- success &= validateSequences(conn);
- success &= validateSchemaTables(conn);
- success &= validateLocations(conn, schemaTool.getValidationServers());
- success &= validateColumnNullValues(conn);
- } finally {
- if (conn != null) {
- try {
- conn.close();
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to close metastore connection", e);
- }
- }
- }
-
- System.out.print("Done with metastore validation: ");
- if (!success) {
- System.out.println("[FAIL]");
- throw new HiveMetaException("Validation failed");
- } else {
- System.out.println("[SUCCESS]");
- }
- }
-
- boolean validateSchemaVersions() throws HiveMetaException {
- System.out.println("Validating schema version");
- try {
- String hiveSchemaVersion = schemaTool.getMetaStoreSchemaInfo().getHiveSchemaVersion();
- MetaStoreConnectionInfo connectionInfo = schemaTool.getConnectionInfo(false);
- String newSchemaVersion = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo);
- schemaTool.assertCompatibleVersion(hiveSchemaVersion, newSchemaVersion);
- } catch (HiveMetaException hme) {
- if (hme.getMessage().contains("Metastore schema version is not compatible") ||
- hme.getMessage().contains("Multiple versions were found in metastore") ||
- hme.getMessage().contains("Could not find version info in metastore VERSION table")) {
- System.err.println(hme.getMessage());
- System.out.println("[FAIL]\n");
- return false;
- } else {
- throw hme;
- }
- }
- System.out.println("[SUCCESS]\n");
- return true;
- }
-
- private static final String QUERY_SEQ =
- " select t.NEXT_VAL" +
- " from SEQUENCE_TABLE t " +
- " where t.SEQUENCE_NAME = ? " +
- "order by t.SEQUENCE_NAME";
-
- private static final String QUERY_MAX_ID =
- "select max(%s)" +
- " from %s";
-
- @VisibleForTesting
- boolean validateSequences(Connection conn) throws HiveMetaException {
- Map<String, Pair<String, String>> seqNameToTable =
- new ImmutableMap.Builder<String, Pair<String, String>>()
- .put("MDatabase", Pair.of("DBS", "DB_ID"))
- .put("MRole", Pair.of("ROLES", "ROLE_ID"))
- .put("MGlobalPrivilege", Pair.of("GLOBAL_PRIVS", "USER_GRANT_ID"))
- .put("MTable", Pair.of("TBLS","TBL_ID"))
- .put("MStorageDescriptor", Pair.of("SDS", "SD_ID"))
- .put("MSerDeInfo", Pair.of("SERDES", "SERDE_ID"))
- .put("MColumnDescriptor", Pair.of("CDS", "CD_ID"))
- .put("MTablePrivilege", Pair.of("TBL_PRIVS", "TBL_GRANT_ID"))
- .put("MTableColumnStatistics", Pair.of("TAB_COL_STATS", "CS_ID"))
- .put("MPartition", Pair.of("PARTITIONS", "PART_ID"))
- .put("MPartitionColumnStatistics", Pair.of("PART_COL_STATS", "CS_ID"))
- .put("MFunction", Pair.of("FUNCS", "FUNC_ID"))
- .put("MIndex", Pair.of("IDXS", "INDEX_ID"))
- .put("MStringList", Pair.of("SKEWED_STRING_LIST", "STRING_LIST_ID"))
- .build();
-
- System.out.println("Validating sequence number for SEQUENCE_TABLE");
-
- boolean isValid = true;
- try {
- Statement stmt = conn.createStatement();
- for (Map.Entry<String, Pair<String, String>> e : seqNameToTable.entrySet()) {
- String tableName = e.getValue().getLeft();
- String tableKey = e.getValue().getRight();
- String fullSequenceName = "org.apache.hadoop.hive.metastore.model." + e.getKey();
- String seqQuery = schemaTool.quote(QUERY_SEQ);
- String maxIdQuery = String.format(schemaTool.quote(QUERY_MAX_ID), tableKey, tableName);
-
- ResultSet res = stmt.executeQuery(maxIdQuery);
- if (res.next()) {
- long maxId = res.getLong(1);
- if (maxId > 0) {
- PreparedStatement stmtSeq = conn.prepareStatement(seqQuery);
- stmtSeq.setString(1, fullSequenceName);
- ResultSet resSeq = stmtSeq.executeQuery();
- if (!resSeq.next()) {
- isValid = false;
- System.err.println("Missing SEQUENCE_NAME " + e.getKey() + " from SEQUENCE_TABLE");
- } else if (resSeq.getLong(1) < maxId) {
- isValid = false;
- System.err.println("NEXT_VAL for " + e.getKey() + " in SEQUENCE_TABLE < max(" + tableKey +
- ") in " + tableName);
- }
- }
- }
- }
-
- System.out.println(isValid ? "[SUCCESS]\n" : "[FAIL]\n");
- return isValid;
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to validate sequence number for SEQUENCE_TABLE", e);
- }
- }
-
- @VisibleForTesting
- boolean validateSchemaTables(Connection conn) throws HiveMetaException {
- System.out.println("Validating metastore schema tables");
- String version = null;
- try {
- MetaStoreConnectionInfo connectionInfo = schemaTool.getConnectionInfo(false);
- version = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo);
- } catch (HiveMetaException he) {
- System.err.println("Failed to determine schema version from Hive Metastore DB. " + he.getMessage());
- System.out.println("Failed in schema table validation.");
- LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage(), he);
- return false;
- }
-
- Connection hmsConn = schemaTool.getConnectionToMetastore(false);
-
- LOG.debug("Validating tables in the schema for version " + version);
- List<String> dbTables = new ArrayList<>();
- ResultSet rs = null;
- try {
- String schema = null;
- try {
- schema = hmsConn.getSchema();
- } catch (SQLFeatureNotSupportedException e) {
- LOG.debug("schema is not supported");
- }
-
- DatabaseMetaData metadata = conn.getMetaData();
- rs = metadata.getTables(null, schema, "%", new String[] {"TABLE"});
-
- while (rs.next()) {
- String table = rs.getString("TABLE_NAME");
- dbTables.add(table.toLowerCase());
- LOG.debug("Found table " + table + " in HMS dbstore");
- }
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to retrieve schema tables from Hive Metastore DB," +
- e.getMessage(), e);
- } finally {
- if (rs != null) {
- try {
- rs.close();
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to close resultset", e);
- }
- }
- }
-
- // Parse the schema file to determine the tables that are expected to exist.
- // The Oracle schema is used here because it is the simplest to parse (no quotes, backticks, etc.).
- List<String> schemaTables = new ArrayList<>();
- List<String> subScripts = new ArrayList<>();
-
- String baseDir = new File(schemaTool.getMetaStoreSchemaInfo().getMetaStoreScriptDir()).getParent();
- String schemaFile = new File(schemaTool.getMetaStoreSchemaInfo().getMetaStoreScriptDir(),
- schemaTool.getMetaStoreSchemaInfo().generateInitFileName(version)).getPath();
- try {
- LOG.debug("Parsing schema script " + schemaFile);
- subScripts.addAll(findCreateTable(schemaFile, schemaTables));
- while (subScripts.size() > 0) {
- schemaFile = baseDir + "/" + schemaTool.getDbType() + "/" + subScripts.remove(0);
- LOG.debug("Parsing subscript " + schemaFile);
- subScripts.addAll(findCreateTable(schemaFile, schemaTables));
- }
- } catch (Exception e) {
- System.err.println("Exception in parsing schema file. Cause:" + e.getMessage());
- System.out.println("Failed in schema table validation.");
- return false;
- }
-
- LOG.debug("Schema tables:[ " + Arrays.toString(schemaTables.toArray()) + " ]");
- LOG.debug("DB tables:[ " + Arrays.toString(dbTables.toArray()) + " ]");
-
- // now diff the lists
- schemaTables.removeAll(dbTables);
- if (schemaTables.size() > 0) {
- Collections.sort(schemaTables);
- System.err.println("Table(s) [ " + Arrays.toString(schemaTables.toArray()) + " ] " +
- "are missing from the metastore database schema.");
- System.out.println("[FAIL]\n");
- return false;
- } else {
- System.out.println("[SUCCESS]\n");
- return true;
- }
- }
-
- @VisibleForTesting
- List<String> findCreateTable(String path, List<String> tableList) throws Exception {
- if (!(new File(path)).exists()) {
- throw new Exception(path + " does not exist. Potentially incorrect version in the metastore VERSION table");
- }
-
- List<String> subs = new ArrayList<>();
- NestedScriptParser sp = HiveSchemaHelper.getDbCommandParser(schemaTool.getDbType(), false);
- Pattern regexp = Pattern.compile("CREATE TABLE(\\s+IF NOT EXISTS)?\\s+(\\S+).*");
-
- try (BufferedReader reader = new BufferedReader(new FileReader(path))) {
- String line = null;
- while ((line = reader.readLine()) != null) {
- if (sp.isNestedScript(line)) {
- String subScript = sp.getScriptName(line);
- LOG.debug("Schema subscript " + subScript + " found");
- subs.add(subScript);
- continue;
- }
- line = line.replaceAll("( )+", " "); //suppress multi-spaces
- line = line.replaceAll("\\(", " ");
- line = line.replaceAll("IF NOT EXISTS ", "");
- line = line.replaceAll("`", "");
- line = line.replaceAll("'", "");
- line = line.replaceAll("\"", "");
- Matcher matcher = regexp.matcher(line);
-
- if (matcher.find()) {
- String table = matcher.group(2);
- if (schemaTool.getDbType().equals("derby")) {
- table = table.replaceAll("APP\\.", "");
- }
- tableList.add(table.toLowerCase());
- LOG.debug("Found table " + table + " in the schema");
- }
- }
- } catch (IOException ex) {
- throw new Exception(ex.getMessage(), ex);
- }
-
- return subs;
- }
-
- @VisibleForTesting
- boolean validateLocations(Connection conn, URI[] defaultServers) throws HiveMetaException {
- System.out.println("Validating DFS locations");
- boolean rtn = true;
- rtn &= checkMetaStoreDBLocation(conn, defaultServers);
- rtn &= checkMetaStoreTableLocation(conn, defaultServers);
- rtn &= checkMetaStorePartitionLocation(conn, defaultServers);
- rtn &= checkMetaStoreSkewedColumnsLocation(conn, defaultServers);
- System.out.println(rtn ? "[SUCCESS]\n" : "[FAIL]\n");
- return rtn;
- }
-
- private static final String QUERY_DB_LOCATION =
- " select dbt.DB_ID, " +
- " dbt.NAME, " +
- " dbt.DB_LOCATION_URI " +
- " from DBS dbt " +
- "order by dbt.DB_ID ";
-
- private boolean checkMetaStoreDBLocation(Connection conn, URI[] defaultServers) throws HiveMetaException {
- String dbLocQuery = schemaTool.quote(QUERY_DB_LOCATION);
-
- int numOfInvalid = 0;
- try (Statement stmt = conn.createStatement();
- ResultSet res = stmt.executeQuery(dbLocQuery)) {
- while (res.next()) {
- String locValue = res.getString(3);
- String dbName = getNameOrID(res, 2, 1);
- if (!checkLocation("Database " + dbName, locValue, defaultServers)) {
- numOfInvalid++;
- }
- }
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to get DB Location Info.", e);
- }
- return numOfInvalid == 0;
- }
-
- private static final String TAB_ID_RANGE_QUERY =
- "select max(TBL_ID), " +
- " min(TBL_ID) " +
- " from TBLS ";
-
- private static final String TAB_LOC_QUERY =
- " select tbl.TBL_ID, " +
- " tbl.TBL_NAME, " +
- " sd.LOCATION, " +
- " dbt.DB_ID, " +
- " dbt.NAME " +
- " from TBLS tbl " +
- "inner join SDS sd on sd.SD_ID = tbl.SD_ID " +
- "inner join DBS dbt on tbl.DB_ID = dbt.DB_ID " +
- " where tbl.TBL_TYPE != '%s' " +
- " and tbl.TBL_ID >= ? " +
- " and tbl.TBL_ID <= ? " +
- " order by tbl.TBL_ID ";
-
- private static final int TAB_LOC_CHECK_SIZE = 2000;
-
- private boolean checkMetaStoreTableLocation(Connection conn, URI[] defaultServers)
- throws HiveMetaException {
- String tabIDRangeQuery = schemaTool.quote(TAB_ID_RANGE_QUERY);
- String tabLocQuery = String.format(schemaTool.quote(TAB_LOC_QUERY), TableType.VIRTUAL_VIEW);
-
- try {
- long maxID = 0, minID = 0;
- try (Statement stmt = conn.createStatement();
- ResultSet res = stmt.executeQuery(tabIDRangeQuery)) {
- if (res.next()) {
- maxID = res.getLong(1);
- minID = res.getLong(2);
- }
- }
-
- int numOfInvalid = 0;
- try (PreparedStatement pStmt = conn.prepareStatement(tabLocQuery)) {
- while (minID <= maxID) {
- pStmt.setLong(1, minID);
- pStmt.setLong(2, minID + TAB_LOC_CHECK_SIZE);
- try (ResultSet res = pStmt.executeQuery()) {
- while (res.next()) {
- String locValue = res.getString(3);
- String entity = "Database " + getNameOrID(res, 5, 4) + ", Table " + getNameOrID(res, 2, 1);
- if (!checkLocation(entity, locValue, defaultServers)) {
- numOfInvalid++;
- }
- }
- }
- minID += TAB_LOC_CHECK_SIZE + 1;
- }
- }
-
- return numOfInvalid == 0;
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to get Table Location Info.", e);
- }
- }
-
- private static final String QUERY_PART_ID_RANGE =
- "select max(PART_ID)," +
- " min(PART_ID)" +
- " from PARTITIONS ";
-
- private static final String QUERY_PART_LOC =
- " select pt.PART_ID, " +
- " pt.PART_NAME, " +
- " sd.LOCATION, " +
- " tbl.TBL_ID, " +
- " tbl.TBL_NAME, " +
- " dbt.DB_ID, " +
- " dbt.NAME " +
- " from PARTITIONS pt " +
- "inner join SDS sd on sd.SD_ID = pt.SD_ID " +
- "inner join TBLS tbl on tbl.TBL_ID = pt.TBL_ID " +
- "inner join DBS dbt on dbt.DB_ID = tbl.DB_ID " +
- " where pt.PART_ID >= ? " +
- " and pt.PART_ID <= ? " +
- " order by tbl.TBL_ID ";
-
- private static final int PART_LOC_CHECK_SIZE = 2000;
-
- private boolean checkMetaStorePartitionLocation(Connection conn, URI[] defaultServers)
- throws HiveMetaException {
- String queryPartIDRange = schemaTool.quote(QUERY_PART_ID_RANGE);
- String queryPartLoc = schemaTool.quote(QUERY_PART_LOC);
-
- try {
- long maxID = 0, minID = 0;
- try (Statement stmt = conn.createStatement();
- ResultSet res = stmt.executeQuery(queryPartIDRange)) {
- if (res.next()) {
- maxID = res.getLong(1);
- minID = res.getLong(2);
- }
- }
-
- int numOfInvalid = 0;
- try (PreparedStatement pStmt = conn.prepareStatement(queryPartLoc)) {
- while (minID <= maxID) {
- pStmt.setLong(1, minID);
- pStmt.setLong(2, minID + PART_LOC_CHECK_SIZE);
- try (ResultSet res = pStmt.executeQuery()) {
- while (res.next()) {
- String locValue = res.getString(3);
- String entity = "Database " + getNameOrID(res, 7, 6) + ", Table " + getNameOrID(res, 5, 4) +
- ", Partition " + getNameOrID(res, 2, 1);
- if (!checkLocation(entity, locValue, defaultServers)) {
- numOfInvalid++;
- }
- }
- }
- minID += PART_LOC_CHECK_SIZE + 1;
- }
- }
-
- return numOfInvalid == 0;
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to get Partition Location Info.", e);
- }
- }
-
- private static final String QUERY_SKEWED_COL_ID_RANGE =
- "select max(STRING_LIST_ID_KID), " +
- " min(STRING_LIST_ID_KID) " +
- " from SKEWED_COL_VALUE_LOC_MAP ";
-
- private static final String QUERY_SKEWED_COL_LOC =
- " select t.TBL_NAME, " +
- " t.TBL_ID, " +
- " sk.STRING_LIST_ID_KID, " +
- " sk.LOCATION, " +
- " db.NAME, " +
- " db.DB_ID " +
- " from TBLS t " +
- " join SDS s on s.SD_ID = t.SD_ID " +
- " join DBS db on db.DB_ID = t.DB_ID " +
- " join SKEWED_COL_VALUE_LOC_MAP sk on sk.SD_ID = s.SD_ID " +
- " where sk.STRING_LIST_ID_KID >= ? " +
- " and sk.STRING_LIST_ID_KID <= ? " +
- "order by t.TBL_ID ";
-
- private static final int SKEWED_COL_LOC_CHECK_SIZE = 2000;
-
- private boolean checkMetaStoreSkewedColumnsLocation(Connection conn, URI[] defaultServers)
- throws HiveMetaException {
- String querySkewedColIDRange = schemaTool.quote(QUERY_SKEWED_COL_ID_RANGE);
- String querySkewedColLoc = schemaTool.quote(QUERY_SKEWED_COL_LOC);
-
- try {
- long maxID = 0, minID = 0;
- try (Statement stmt = conn.createStatement();
- ResultSet res = stmt.executeQuery(querySkewedColIDRange)) {
- if (res.next()) {
- maxID = res.getLong(1);
- minID = res.getLong(2);
- }
- }
-
- int numOfInvalid = 0;
- try (PreparedStatement pStmt = conn.prepareStatement(querySkewedColLoc)) {
- while (minID <= maxID) {
- pStmt.setLong(1, minID);
- pStmt.setLong(2, minID + SKEWED_COL_LOC_CHECK_SIZE);
- try (ResultSet res = pStmt.executeQuery()) {
- while (res.next()) {
- String locValue = res.getString(4);
- String entity = "Database " + getNameOrID(res, 5, 6) + ", Table " + getNameOrID(res, 1, 2) +
- ", String list " + res.getString(3);
- if (!checkLocation(entity, locValue, defaultServers)) {
- numOfInvalid++;
- }
- }
- }
- minID += SKEWED_COL_LOC_CHECK_SIZE + 1;
- }
- }
-
- return numOfInvalid == 0;
- } catch (SQLException e) {
- throw new HiveMetaException("Failed to get skewed columns location info.", e);
- }
- }
-
- /**
- * Check if the location is valid for the given entity.
- * @param entity the entity to represent a database, partition or table
- * @param entityLocation the location
- * @param defaultServers a list of servers the location must match;
- * the location host has to match one of the given servers.
- * If empty, the server check is skipped.
- * @return true if the location is valid
- */
- private boolean checkLocation(String entity, String entityLocation, URI[] defaultServers) {
- boolean isValid = true;
-
- if (entityLocation == null) {
- System.err.println(entity + ", Error: empty location");
- isValid = false;
- } else {
- try {
- URI currentUri = new Path(entityLocation).toUri();
- String scheme = currentUri.getScheme();
- String path = currentUri.getPath();
- if (StringUtils.isEmpty(scheme)) {
- System.err.println(entity + ", Location: " + entityLocation + ", Error: missing location scheme.");
- isValid = false;
- } else if (StringUtils.isEmpty(path)) {
- System.err.println(entity + ", Location: " + entityLocation + ", Error: missing location path.");
- isValid = false;
- } else if (ArrayUtils.isNotEmpty(defaultServers) && currentUri.getAuthority() != null) {
- String authority = currentUri.getAuthority();
- boolean matchServer = false;
- for(URI server : defaultServers) {
- if (StringUtils.equalsIgnoreCase(server.getScheme(), scheme) &&
- StringUtils.equalsIgnoreCase(server.getAuthority(), authority)) {
- matchServer = true;
- break;
- }
- }
- if (!matchServer) {
- System.err.println(entity + ", Location: " + entityLocation + ", Error: mismatched server.");
- isValid = false;
- }
- }
-
- // if there is no path element other than "/", report it but do not fail
- if (isValid && StringUtils.containsOnly(path, "/")) {
- System.err.println(entity + ", Location: " + entityLocation + ", Warn: location set to root, " +
- "not a recommended config.");
- }
- } catch (Exception pe) {
- System.err.println(entity + ", Error: invalid location - " + pe.getMessage());
- isValid = false;
- }
- }
-
- return isValid;
- }
-
- private String getNameOrID(ResultSet res, int nameInx, int idInx) throws SQLException {
- String itemName = res.getString(nameInx);
- return (itemName == null || itemName.isEmpty()) ? "ID: " + res.getString(idInx) : "Name: " + itemName;
- }
-
- private static final String QUERY_COLUMN_NULL_VALUES =
- " select t.*" +
- " from TBLS t" +
- " where t.SD_ID IS NULL" +
- " and (t.TBL_TYPE = '" + TableType.EXTERNAL_TABLE + "' or" +
- " t.TBL_TYPE