diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java index 181f0d2..1be8561 100644 --- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java +++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java @@ -29,6 +29,7 @@ import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; +import java.util.ArrayList; import java.util.IllegalFormatException; import java.util.List; @@ -158,9 +159,9 @@ public static String getValidConfVar(HiveConf.ConfVars confVar, HiveConf hiveCon * * @param scriptDir upgrade script directory * @param scriptFile upgrade script file - * @return string of sql commands + * @return the list of sql commands */ - public String buildCommand(String scriptDir, String scriptFile) + public List buildCommands(String scriptDir, String scriptFile) throws IllegalFormatException, IOException; } @@ -222,12 +223,12 @@ public boolean needsQuotedIdentifier() { } @Override - public String buildCommand( + public List buildCommands( String scriptDir, String scriptFile) throws IllegalFormatException, IOException { BufferedReader bfReader = new BufferedReader(new FileReader(scriptDir + File.separatorChar + scriptFile)); String currLine; - StringBuilder sb = new StringBuilder(); + List commands = new ArrayList(); String currentCommand = null; while ((currLine = bfReader.readLine()) != null) { currLine = currLine.trim(); @@ -251,18 +252,16 @@ public String buildCommand( if (isNestedScript(currentCommand)) { // if this is a nested sql script then flatten it String currScript = getScriptName(currentCommand); - sb.append(buildCommand(scriptDir, currScript)); + commands.addAll(buildCommands(scriptDir, currScript)); } else { - // Now we have a complete statement, process it - // write the line to buffer - sb.append(currentCommand); - sb.append(System.getProperty("line.separator")); + // Now we have a complete statement, add it + 
commands.add(currentCommand); } } currentCommand = null; } bfReader.close(); - return sb.toString(); + return commands; } private void setDbOpts(String dbOpts) { diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java index cd36ddf..392897f 100644 --- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java +++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java @@ -26,8 +26,6 @@ import org.apache.commons.cli.OptionGroup; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.io.output.NullOutputStream; -import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaException; @@ -38,18 +36,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.BufferedReader; -import java.io.BufferedWriter; import java.io.File; -import java.io.FileReader; -import java.io.FileWriter; import java.io.IOException; -import java.io.PrintStream; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; import java.util.List; public class HiveSchemaTool { @@ -117,8 +109,14 @@ private static void printAndExit(Options cmdLineOptions) { private Connection getConnectionToMetastore(boolean printInfo) throws HiveMetaException { - return HiveSchemaHelper.getConnectionToMetastore(userName, - passWord, printInfo, hiveConf); + try { + Connection conn = HiveSchemaHelper.getConnectionToMetastore(userName, + passWord, printInfo, hiveConf); + conn.setAutoCommit(false); + return conn; + } catch(SQLException e) { + throw new HiveMetaException(e); + } } private NestedScriptParser getDbCommandParser(String dbType) { @@ -128,16 +126,29 @@ private NestedScriptParser getDbCommandParser(String dbType) { /*** * Print Hive 
version and schema version - * @throws MetaException + * @throws HiveMetaException */ public void showInfo() throws HiveMetaException { - Connection metastoreConn = getConnectionToMetastore(true); - String hiveVersion = MetaStoreSchemaInfo.getHiveSchemaVersion(); - String dbVersion = getMetaStoreSchemaVersion(metastoreConn); - System.out.println("Hive distribution version:\t " + hiveVersion); - System.out.println("Metastore schema version:\t " + dbVersion); - assertCompatibleVersion(hiveVersion, dbVersion); - + Connection conn = null; + try { + conn = getConnectionToMetastore(true); + String hiveVersion = MetaStoreSchemaInfo.getHiveSchemaVersion(); + String dbVersion = getMetaStoreSchemaVersion(conn); + System.out.println("Hive distribution version:\t " + hiveVersion); + System.out.println("Metastore schema version:\t " + dbVersion); + assertCompatibleVersion(hiveVersion, dbVersion); + } finally { + try { + if (conn != null) { + if(dryRun) { + conn.rollback(); + } + conn.close(); + } + } catch (SQLException e) { + System.err.println("Failed to close the metastore connection:" + e.getMessage()); + } + } } // read schema version from metastore @@ -149,51 +160,65 @@ private String getMetaStoreSchemaVersion(Connection metastoreConn) } else { versionQuery = "select t.SCHEMA_VERSION from VERSION t"; } - try(Statement stmt = metastoreConn.createStatement(); - ResultSet res = stmt.executeQuery(versionQuery)) { + + Statement stmt = null; + try{ + stmt = metastoreConn.createStatement(); + ResultSet res = stmt.executeQuery(versionQuery); if (!res.next()) { throw new HiveMetaException("Didn't find version data in metastore"); } String currentSchemaVersion = res.getString(1); + if (!dryRun) { + metastoreConn.commit(); + } return currentSchemaVersion; } catch (SQLException e) { throw new HiveMetaException("Failed to get schema version.", e); - } - finally { - try { - metastoreConn.close(); - } catch (SQLException e) { - System.err.println("Failed to close the metastore 
connection"); - e.printStackTrace(System.err); + } finally { + if (stmt != null) { + try { + stmt.close(); + } catch(SQLException e) { + System.err.println("Failed to close statement:" + e.getMessage()); + } } } } - // test the connection metastore using the config property - private void testConnectionToMetastore() throws HiveMetaException { - Connection conn = getConnectionToMetastore(true); - try { - conn.close(); - } catch (SQLException e) { - throw new HiveMetaException("Failed to close metastore connection", e); - } - } - - /** * check if the current schema version in metastore matches the Hive version - * @throws MetaException + * with a provided connection + * @throws HiveMetaException */ - public void verifySchemaVersion() throws HiveMetaException { - // don't check version if its a dry run - if (dryRun) { - return; - } - String newSchemaVersion = getMetaStoreSchemaVersion( - getConnectionToMetastore(false)); + private void verifySchemaVersion(Connection conn) throws HiveMetaException { + String newSchemaVersion = getMetaStoreSchemaVersion(conn); // verify that the new version is added to schema assertCompatibleVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(), newSchemaVersion); + } + /** + * check if the current schema version in metastore matches the Hive version. + * Currently used by test only. + * @throws HiveMetaException + */ + public void verifySchemaVersion() throws HiveMetaException { + Connection conn = null; + try { + conn = getConnectionToMetastore(true); + verifySchemaVersion(conn); + } finally { + try { + if (conn != null) { + if(dryRun) { + conn.rollback(); + } + conn.close(); + } + } catch (SQLException e) { + System.err.println("Failed to close the metastore connection:" + e.getMessage()); + } + } } private void assertCompatibleVersion(String hiveSchemaVersion, String dbSchemaVersion) @@ -205,54 +230,68 @@ private void assertCompatibleVersion(String hiveSchemaVersion, String dbSchemaVe } /** - * Perform metastore schema upgrade. 
extract the current schema version from metastore - * @throws MetaException + * Perform metastore schema upgrade. Extract the current schema version from metastore + * @throws HiveMetaException */ public void doUpgrade() throws HiveMetaException { - String fromVersion = getMetaStoreSchemaVersion( - getConnectionToMetastore(false)); - if (fromVersion == null || fromVersion.isEmpty()) { - throw new HiveMetaException("Schema version not stored in the metastore. " + - "Metastore schema is too old or corrupt. Try specifying the version manually"); - } - doUpgrade(fromVersion); + doUpgrade(null); } /** * Perform metastore schema upgrade * - * @param fromSchemaVer - * Existing version of the metastore. If null, then read from the metastore - * @throws MetaException + * @param fromSchemaVer Existing version of the metastore. If null, then read from the metastore. + * @throws HiveMetaException */ public void doUpgrade(String fromSchemaVer) throws HiveMetaException { - if (MetaStoreSchemaInfo.getHiveSchemaVersion().equals(fromSchemaVer)) { - System.out.println("No schema upgrade required from version " + fromSchemaVer); - return; - } - // Find the list of scripts to execute for this upgrade - List upgradeScripts = - metaStoreSchemaInfo.getUpgradeScripts(fromSchemaVer); - testConnectionToMetastore(); - System.out.println("Starting upgrade metastore schema from version " + - fromSchemaVer + " to " + MetaStoreSchemaInfo.getHiveSchemaVersion()); - String scriptDir = metaStoreSchemaInfo.getMetaStoreScriptDir(); + Connection conn = null; try { + conn = getConnectionToMetastore(true); + if (fromSchemaVer == null) { + fromSchemaVer = getMetaStoreSchemaVersion(conn); + if (fromSchemaVer == null || fromSchemaVer.isEmpty()) { + throw new HiveMetaException("Schema version not stored in the metastore. " + + "Metastore schema is too old or corrupt. 
Try specifying the version manually"); + } + } + + if (MetaStoreSchemaInfo.getHiveSchemaVersion().equals(fromSchemaVer)) { + System.out.println("No schema upgrade required from version " + fromSchemaVer); + return; + } + // Find the list of scripts to execute for this upgrade + List upgradeScripts = + metaStoreSchemaInfo.getUpgradeScripts(fromSchemaVer); + + System.out.println("Starting upgrade metastore schema from version " + + fromSchemaVer + " to " + MetaStoreSchemaInfo.getHiveSchemaVersion()); + String scriptDir = metaStoreSchemaInfo.getMetaStoreScriptDir(); + for (String scriptFile : upgradeScripts) { System.out.println("Upgrade script " + scriptFile); - if (!dryRun) { - runPreUpgrade(scriptDir, scriptFile); - runBeeLine(scriptDir, scriptFile); - System.out.println("Completed " + scriptFile); - } + runPreUpgrade(conn, scriptDir, scriptFile); + executeScript(conn, scriptDir, scriptFile); + System.out.println("Completed " + scriptFile); } - } catch (IOException eIO) { + + // Revalidated the new version after upgrade + verifySchemaVersion(conn); + } catch (IOException | SQLException eIO) { throw new HiveMetaException( "Upgrade FAILED! 
Metastore state would be inconsistent !!", eIO); } - - // Revalidated the new version after upgrade - verifySchemaVersion(); + finally { + try { + if (conn != null) { + if (dryRun) { + conn.rollback(); + } + conn.close(); + } + } catch (SQLException e) { + System.err.println("Failed to close the metastore connection:" + e.getMessage()); + } + } } /** @@ -261,10 +300,7 @@ public void doUpgrade(String fromSchemaVer) throws HiveMetaException { * @throws MetaException */ public void doInit() throws HiveMetaException { - doInit(MetaStoreSchemaInfo.getHiveSchemaVersion()); - - // Revalidated the new version after upgrade - verifySchemaVersion(); + doInit(null); } /** @@ -275,21 +311,39 @@ public void doInit() throws HiveMetaException { * @throws MetaException */ public void doInit(String toVersion) throws HiveMetaException { - testConnectionToMetastore(); - System.out.println("Starting metastore schema initialization to " + toVersion); - - String initScriptDir = metaStoreSchemaInfo.getMetaStoreScriptDir(); - String initScriptFile = metaStoreSchemaInfo.generateInitFileName(toVersion); + Connection conn = null; try { - System.out.println("Initialization script " + initScriptFile); - if (!dryRun) { - runBeeLine(initScriptDir, initScriptFile); - System.out.println("Initialization script completed"); + conn = getConnectionToMetastore(true); + if (toVersion == null) { + toVersion = MetaStoreSchemaInfo.getHiveSchemaVersion(); } - } catch (IOException e) { + System.out.println("Starting metastore schema initialization to " + toVersion); + + String initScriptDir = metaStoreSchemaInfo.getMetaStoreScriptDir(); + String initScriptFile = metaStoreSchemaInfo.generateInitFileName(toVersion); + + System.out.println("Initialization script " + initScriptFile); + executeScript(conn, initScriptDir, initScriptFile); + System.out.println("Initialization script completed"); + + // Revalidated the new version after initialization + verifySchemaVersion(conn); + + } catch (IOException | 
SQLException e) { throw new HiveMetaException("Schema initialization FAILED!" + " Metastore state would be inconsistent !!", e); + } finally { + try { + if (conn != null) { + if (dryRun) { + conn.rollback(); + } + conn.close(); + } + } catch (SQLException e) { + System.err.println("Failed to close the metastore connection:" + e.getMessage()); + } } } @@ -302,7 +356,7 @@ public void doInit(String toVersion) throws HiveMetaException { * @param scriptDir upgrade script directory name * @param scriptFile upgrade script file name */ - private void runPreUpgrade(String scriptDir, String scriptFile) { + private void runPreUpgrade(Connection conn, String scriptDir, String scriptFile) { for (int i = 0;; i++) { String preUpgradeScript = MetaStoreSchemaInfo.getPreUpgradeScriptName(i, scriptFile); @@ -312,7 +366,7 @@ private void runPreUpgrade(String scriptDir, String scriptFile) { } try { - runBeeLine(scriptDir, preUpgradeScript); + executeScript(conn, scriptDir, preUpgradeScript); System.out.println("Completed " + preUpgradeScript); } catch (Exception e) { // Ignore the pre-upgrade script errors @@ -325,69 +379,36 @@ private void runPreUpgrade(String scriptDir, String scriptFile) { } } - /*** - * Run beeline with the given metastore script. Flatten the nested scripts - * into single file. + /** + * Execute the script file against database + * @param scriptDir the script directory + * @param scriptFile the script file + * @throws SQLException */ - private void runBeeLine(String scriptDir, String scriptFile) - throws IOException, HiveMetaException { - NestedScriptParser dbCommandParser = getDbCommandParser(dbType); - // expand the nested script - String sqlCommands = dbCommandParser.buildCommand(scriptDir, scriptFile); - File tmpFile = File.createTempFile("schematool", ".sql"); - tmpFile.deleteOnExit(); - - // write out the buffer into a file. 
Add beeline commands for autocommit and close - FileWriter fstream = new FileWriter(tmpFile.getPath()); - BufferedWriter out = new BufferedWriter(fstream); - out.write("!autocommit on" + System.getProperty("line.separator")); - out.write(sqlCommands); - out.write("!closeall" + System.getProperty("line.separator")); - out.close(); - runBeeLine(tmpFile.getPath()); - } - - // Generate the beeline args per hive conf and execute the given script - public void runBeeLine(String sqlScriptFile) throws IOException { - List argList = new ArrayList(); - argList.add("-u"); - argList.add(HiveSchemaHelper.getValidConfVar( - ConfVars.METASTORECONNECTURLKEY, hiveConf)); - argList.add("-d"); - argList.add(HiveSchemaHelper.getValidConfVar( - ConfVars.METASTORE_CONNECTION_DRIVER, hiveConf)); - argList.add("-n"); - argList.add(userName); - argList.add("-p"); - argList.add(passWord); - argList.add("-f"); - argList.add(sqlScriptFile); - - if (LOG.isDebugEnabled()) { - LOG.debug("Going to invoke file that contains:"); - FileReader fr = new FileReader(sqlScriptFile); - BufferedReader reader = new BufferedReader(fr); - String line; - while ((line = reader.readLine()) != null) { - LOG.debug("script: " + line); + private void executeScript(Connection conn, String scriptDir, String scriptFile) + throws IOException, HiveMetaException, SQLException { + Statement stmt = null; + try { + NestedScriptParser dbCommandParser = getDbCommandParser(dbType); + // expand the nested script + List sqlCommands = dbCommandParser.buildCommands(scriptDir, scriptFile); + stmt = conn.createStatement(); + for (String sqlCommand : sqlCommands) { + stmt.addBatch(sqlCommand); + } + stmt.executeBatch(); + // dryRun mode doesn't commit the change + if (!dryRun) { + conn.commit(); + } + } finally { + if (stmt != null) { + try { + stmt.close(); + } catch(SQLException e) { + System.err.println("Failed to close statement:" + e.getMessage()); + } } - } - - // run the script using Beeline - BeeLine beeLine = new BeeLine(); - 
if (!verbose) { - beeLine.setOutputStream(new PrintStream(new NullOutputStream())); - beeLine.getOpts().setSilent(true); - } - beeLine.getOpts().setAllowMultiLineCommand(false); - beeLine.getOpts().setIsolation("TRANSACTION_READ_COMMITTED"); - // We can be pretty sure that an entire line can be processed as a single command since - // we always add a line separator at the end while calling dbCommandParser.buildCommand. - beeLine.getOpts().setEntireLineAsCommand(true); - LOG.debug("Going to run command <" + StringUtils.join(argList, " ") + ">"); - int status = beeLine.begin(argList.toArray(new String[0]), null); - if (status != 0) { - throw new IOException("Schema script failed, errorcode " + status); } } @@ -425,7 +446,7 @@ private static void initOptions(Options cmdLineOptions) { Option dbOpts = OptionBuilder.withArgName("databaseOpts") .hasArgs().withDescription("Backend DB specific options") .create("dbOpts"); - Option dryRunOpt = new Option("dryRun", "list SQL scripts (no execute)"); + Option dryRunOpt = new Option("dryRun", "execute the SQL scripts without the final commit"); Option verboseOpt = new Option("verbose", "only print SQL statements"); cmdLineOptions.addOption(help); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java index 0d5f9c8..10da574 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java @@ -25,12 +25,12 @@ import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; +import java.util.List; import java.util.Random; import junit.framework.TestCase; import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaException; import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo; @@ 
-75,7 +75,7 @@ protected void tearDown() throws Exception {
    */
   public void testSchemaInitDryRun() throws Exception {
     schemaTool.setDryRun(true);
-    schemaTool.doInit("0.7.0");
+    schemaTool.doInit(MetaStoreSchemaInfo.getHiveSchemaVersion());
     schemaTool.setDryRun(false);
     try {
       schemaTool.verifySchemaVersion();
@@ -91,15 +91,19 @@ public void testSchemaInitDryRun() throws Exception {
    * @throws Exception
    */
   public void testSchemaUpgradeDryRun() throws Exception {
-    schemaTool.doInit("0.7.0");
-
+    try {
+      schemaTool.doInit("0.7.0");
+    } catch(HiveMetaException e) {
+      // doInit("0.7.0") installs the old scripts successfully, but its final
+      // version check is expected to fail: the 0.7.0 schema has no VERSION table
+    }
     schemaTool.setDryRun(true);
     schemaTool.doUpgrade("0.7.0");
     schemaTool.setDryRun(false);
     try {
       schemaTool.verifySchemaVersion();
     } catch (HiveMetaException e) {
-      // The connection should fail since it the dry run
+      // Expected: the version check fails because the dry run rolled back the upgrade
       return;
     }
     fail("Dry run shouldn't upgrade metastore schema");
@@ -120,13 +124,12 @@ public void testSchemaInit() throws Exception {
    */
   public void testSchemaUpgrade() throws Exception {
     boolean foundException = false;
-    // Initialize 0.7.0 schema
-    schemaTool.doInit("0.7.0");
-    // verify that driver fails due to older version schema
     try {
-      schemaTool.verifySchemaVersion();
+      // Initialize 0.7.0 schema
+      schemaTool.doInit("0.7.0");
     } catch (HiveMetaException e) {
-      // Expected to fail due to old schema
+      // doInit("0.7.0") installs the old scripts successfully, but its final
+      // version check is expected to fail: the 0.7.0 schema has no VERSION table
       foundException = true;
     }
     if (!foundException) {
@@ -182,20 +185,25 @@ public void testScripts() throws Exception {
       "DROP TABLE footab;",
       "-- ending comment"
     };
-    String resultScript[] = {
+    String expectedCommands[] = {
       "DROP TABLE IF EXISTS fooTab",
       "/*!1234 this is comment code like mysql */",
       "CREATE TABLE fooTab(id INTEGER)",
       "DROP TABLE footab",
     };
-    String expectedSQL = StringUtils.join(resultScript, 
System.getProperty("line.separator")) + - System.getProperty("line.separator"); File testScriptFile = generateTestScript(testScript); - String flattenedSql = HiveSchemaHelper.getDbCommandParser("derby") - .buildCommand(testScriptFile.getParentFile().getPath(), + List resultCommands = HiveSchemaHelper.getDbCommandParser("derby") + .buildCommands(testScriptFile.getParentFile().getPath(), testScriptFile.getName()); - assertEquals(expectedSQL, flattenedSql); + verifyCommands(expectedCommands, resultCommands); + } + + private void verifyCommands(String[] expectedCommands, List resultCommands) { + assertEquals(expectedCommands.length, resultCommands.size()); + for (int i = 0; i < expectedCommands.length; i++) { + assertEquals(expectedCommands[i].trim(), resultCommands.get(i).trim()); + } } /** @@ -232,15 +240,23 @@ public void testNestedScriptsForDerby() throws Exception { "--ending comment ", }; + String[] expectedCommands = { + "DROP TABLE IF EXISTS " + parentTab, + "CREATE TABLE " + parentTab + "(id INTEGER)", + "DROP TABLE IF EXISTS " + childTab1, + "CREATE TABLE " + childTab1 + "(id INTEGER)", + "DROP TABLE " + childTab1, + "DROP TABLE " + parentTab, + "DROP TABLE IF EXISTS " + childTab2, + "CREATE TABLE " + childTab2 + "(id INTEGER)", + "DROP TABLE " + childTab2 + }; + File testScriptFile = generateTestScript(parentTestScript); - String flattenedSql = HiveSchemaHelper.getDbCommandParser("derby") - .buildCommand(testScriptFile.getParentFile().getPath(), + List resultCommands = HiveSchemaHelper.getDbCommandParser("derby") + .buildCommands(testScriptFile.getParentFile().getPath(), testScriptFile.getName()); - assertFalse(flattenedSql.contains("RUN")); - assertFalse(flattenedSql.contains("comment")); - assertTrue(flattenedSql.contains(childTab1)); - assertTrue(flattenedSql.contains(childTab2)); - assertTrue(flattenedSql.contains(parentTab)); + verifyCommands(expectedCommands, resultCommands); } /** @@ -277,15 +293,25 @@ public void testNestedScriptsForMySQL() throws 
Exception { "--ending comment ", }; + String[] expectedCommands = { + "DROP TABLE IF EXISTS " + parentTab, + "/* this is special exec code */", + "CREATE TABLE " + parentTab + "(id INTEGER)", + "DROP TABLE IF EXISTS " + childTab1, + "CREATE TABLE " + childTab1 + "(id INTEGER)", + "DROP TABLE " + childTab1, + "DROP TABLE " + parentTab, + "/* this is a special exec code */", + "DROP TABLE IF EXISTS " + childTab2, + "CREATE TABLE " + childTab2 + "(id INTEGER)", + "DROP TABLE " + childTab2 + }; + File testScriptFile = generateTestScript(parentTestScript); - String flattenedSql = HiveSchemaHelper.getDbCommandParser("mysql") - .buildCommand(testScriptFile.getParentFile().getPath(), + List resultCommands = HiveSchemaHelper.getDbCommandParser("mysql") + .buildCommands(testScriptFile.getParentFile().getPath(), testScriptFile.getName()); - assertFalse(flattenedSql.contains("RUN")); - assertFalse(flattenedSql.contains("comment")); - assertTrue(flattenedSql.contains(childTab1)); - assertTrue(flattenedSql.contains(childTab2)); - assertTrue(flattenedSql.contains(parentTab)); + verifyCommands(expectedCommands, resultCommands); } /** @@ -307,7 +333,7 @@ public void testScriptWithDelimiter() throws Exception { "DROP TABLE footab;", "-- ending comment" }; - String resultScript[] = { + String expectedCommands[] = { "DROP TABLE IF EXISTS fooTab", "/*!1234 this is comment code like mysql */", "CREATE TABLE fooTab(id INTEGER)", @@ -317,14 +343,13 @@ public void testScriptWithDelimiter() throws Exception { "END PROCEDURE", "DROP TABLE footab", }; - String expectedSQL = StringUtils.join(resultScript, System.getProperty("line.separator")) + - System.getProperty("line.separator"); + File testScriptFile = generateTestScript(testScript); NestedScriptParser testDbParser = HiveSchemaHelper.getDbCommandParser("mysql"); - String flattenedSql = testDbParser.buildCommand(testScriptFile.getParentFile().getPath(), + List resultCommands = 
testDbParser.buildCommands(testScriptFile.getParentFile().getPath(), testScriptFile.getName()); - assertEquals(expectedSQL, flattenedSql); + verifyCommands(expectedCommands, resultCommands); } /** @@ -345,21 +370,19 @@ public void testScriptMultiRowComment() throws Exception { "DROP TABLE footab;", "-- ending comment" }; - String parsedScript[] = { + String expectedCommands[] = { "DROP TABLE IF EXISTS fooTab", "/*!1234 this is comment code like mysql */", "CREATE TABLE fooTab(id INTEGER)", "DROP TABLE footab", }; - String expectedSQL = StringUtils.join(parsedScript, System.getProperty("line.separator")) + - System.getProperty("line.separator"); File testScriptFile = generateTestScript(testScript); NestedScriptParser testDbParser = HiveSchemaHelper.getDbCommandParser("mysql"); - String flattenedSql = testDbParser.buildCommand(testScriptFile.getParentFile().getPath(), + List resultCommands = testDbParser.buildCommands(testScriptFile.getParentFile().getPath(), testScriptFile.getName()); - assertEquals(expectedSQL, flattenedSql); + verifyCommands(expectedCommands, resultCommands); } /** @@ -396,15 +419,24 @@ public void testNestedScriptsForOracle() throws Exception { "--ending comment ", }; + String[] expectedCommands = { + "DROP TABLE IF EXISTS " + parentTab, + "CREATE TABLE " + parentTab + "(id INTEGER)", + "DROP TABLE IF EXISTS " + childTab1, + "CREATE TABLE " + childTab1 + "(id INTEGER)", + "DROP TABLE " + childTab1, + "DROP TABLE " + parentTab, + "DROP TABLE IF EXISTS " + childTab2, + "CREATE TABLE " + childTab2 + "(id INTEGER)", + "DROP TABLE " + childTab2 + }; + File testScriptFile = generateTestScript(parentTestScript); - String flattenedSql = HiveSchemaHelper.getDbCommandParser("oracle") - .buildCommand(testScriptFile.getParentFile().getPath(), + List resultCommands = HiveSchemaHelper.getDbCommandParser("oracle") + .buildCommands(testScriptFile.getParentFile().getPath(), testScriptFile.getName()); - assertFalse(flattenedSql.contains("@")); - 
assertFalse(flattenedSql.contains("comment")); - assertTrue(flattenedSql.contains(childTab1)); - assertTrue(flattenedSql.contains(childTab2)); - assertTrue(flattenedSql.contains(parentTab)); + + verifyCommands(expectedCommands, resultCommands); } /** @@ -430,13 +462,10 @@ public void testPostgresFilter() throws Exception { NestedScriptParser noDbOptParser = HiveSchemaHelper .getDbCommandParser("postgres"); - String expectedSQL = StringUtils.join( - expectedScriptWithOptionPresent, System.getProperty("line.separator")) + - System.getProperty("line.separator"); File testScriptFile = generateTestScript(testScript); - String flattenedSql = noDbOptParser.buildCommand( + List resultCommands = noDbOptParser.buildCommands( testScriptFile.getParentFile().getPath(), testScriptFile.getName()); - assertEquals(expectedSQL, flattenedSql); + verifyCommands(expectedScriptWithOptionPresent, resultCommands); String expectedScriptWithOptionAbsent[] = { "DROP TABLE IF EXISTS fooTab", @@ -448,13 +477,10 @@ public void testPostgresFilter() throws Exception { "postgres", PostgresCommandParser.POSTGRES_SKIP_STANDARD_STRINGS_DBOPT, null, null, null); - expectedSQL = StringUtils.join( - expectedScriptWithOptionAbsent, System.getProperty("line.separator")) + - System.getProperty("line.separator"); testScriptFile = generateTestScript(testScript); - flattenedSql = dbOptParser.buildCommand( + resultCommands = dbOptParser.buildCommands( testScriptFile.getParentFile().getPath(), testScriptFile.getName()); - assertEquals(expectedSQL, flattenedSql); + verifyCommands(expectedScriptWithOptionAbsent, resultCommands); } private File generateTestScript(String [] stmts) throws IOException { @@ -482,7 +508,7 @@ private String writeDummyPreUpgradeScript(int index, String upgradeScriptName, File.separatorChar + preUpgradeScript; FileWriter fstream = new FileWriter(dummyPreScriptPath); BufferedWriter out = new BufferedWriter(fstream); - out.write(sql + System.getProperty("line.separator") + ";"); + 
out.write(sql + System.getProperty("line.separator"));
     out.close();
     return preUpgradeScript;
   }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
index 9c30ee7..2b5b206 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
@@ -19,7 +19,6 @@ import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FileReader;
 import java.io.IOException;