diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java index 4c6eb9b..1bb83e8 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -96,7 +96,7 @@ public class BeeLine { private static final ResourceBundle resourceBundle = ResourceBundle.getBundle(BeeLine.class.getName()); - private BeeLineSignalHandler signalHandler = null; + private final BeeLineSignalHandler signalHandler = null; private static final String separator = System.getProperty("line.separator"); private boolean exit = false; private final DatabaseConnections connections = new DatabaseConnections(); @@ -653,7 +653,7 @@ public void close() { public ConsoleReader getConsoleReader(InputStream inputStream) throws IOException { if (inputStream != null) { // ### NOTE: fix for sf.net bug 879425. - consoleReader = new ConsoleReader(inputStream, new PrintWriter(System.out, true)); + consoleReader = new ConsoleReader(inputStream, new PrintWriter(getOutputStream(), true)); } else { consoleReader = new ConsoleReader(); } @@ -1614,7 +1614,7 @@ void setCompletions() throws SQLException, IOException { } } - BeeLineOpts getOpts() { + public BeeLineOpts getOpts() { return opts; } diff --git beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java new file mode 100644 index 0000000..a629bd2 --- /dev/null +++ beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java @@ -0,0 +1,147 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.beeline; + +import java.util.IllegalFormatException; + +public class HiveSchemaHelper { + public static final String DB_DERBY = "derby"; + public static final String DB_MYSQL = "mysql"; + public static final String DB_POSTGRACE = "postgrace"; + public static final String DB_ORACLE = "oracle"; + + public interface NestedScriptParser { + /** Parse the DB specific nesting format and extract the inner script name if any + * @param dbCommand command from parent script + * @return + * @throws IllegalFormatException + */ + public String getScriptName(String dbCommand) throws IllegalFormatException; + } + + // Derby commandline parser + public static class DerbyCommandParser implements NestedScriptParser { + private static String nestingToken = "RUN"; + @Override + public String getScriptName(String dbCommand) throws IllegalArgumentException { + // Derby script format is + // RUN '' + if (dbCommand == null) { + throw new IllegalArgumentException("Null command line "); + } + if (dbCommand.isEmpty() || !dbCommand.startsWith(nestingToken)) { + return null; + } + + String[] tokens = dbCommand.split(" "); + if (tokens.length != 2) { + throw new IllegalArgumentException("Couldn't parse line " + dbCommand); + } + // remove quotes around the script name + if (tokens[1].endsWith(";")) { + return tokens[1].substring(1, tokens[1].length()-2); + } else { + return tokens[1].substring(1, tokens[1].length()-1); + } + } + } + + // MySQL parser + public static class MySqlCommandParser implements NestedScriptParser { + private static String 
nestingToken = "SOURCE"; + @Override + public String getScriptName(String dbCommand) throws IllegalFormatException { + if (dbCommand == null) { + throw new IllegalArgumentException("Null command line "); + } + if (dbCommand.isEmpty() || !dbCommand.startsWith(nestingToken)) { + return null; + } + String[] tokens = dbCommand.split(" "); + if (tokens.length != 2) { + throw new IllegalArgumentException("Couldn't parse line " + dbCommand); + } + // remove ending ';' + if (tokens[1].endsWith(";")) { + return tokens[1].substring(0, tokens[1].length()-1); + } else { + return tokens[1]; + } + } + } + + // Postgres specific parser + public static class PostgresCommandParser implements NestedScriptParser { + private static String nestingToken = "\\i"; + + @Override + public String getScriptName(String dbCommand) throws IllegalFormatException { + if (dbCommand == null) { + throw new IllegalArgumentException("Null command line "); + } + if (dbCommand.isEmpty() || !dbCommand.startsWith(nestingToken)) { + return null; + } + String[] tokens = dbCommand.split(" "); + if (tokens.length != 2) { + throw new IllegalArgumentException("Couldn't parse line " + dbCommand); + } + // remove ending ';' + if (tokens[1].endsWith(";")) { + return tokens[1].substring(0, tokens[1].length()-1); + } else { + return tokens[1]; + } + } + } + + //Oracle specific parser + public static class OracleCommandParser implements NestedScriptParser { + private static String nestingToken = "@"; + @Override + public String getScriptName(String dbCommand) throws IllegalFormatException { + if (dbCommand == null) { + throw new IllegalArgumentException("Null command line "); + } + if (dbCommand.isEmpty() || !dbCommand.startsWith(nestingToken)) { + return null; + } + // remove ending ';' + if (dbCommand.endsWith(";")) { + return dbCommand.substring(1, dbCommand.length()-1); + } else { + return dbCommand.substring(1); + } + + } + } + + public static NestedScriptParser getDbCommandParser(String dbName) { + if 
(dbName.equalsIgnoreCase(DB_DERBY)) { + return new DerbyCommandParser(); + } else if (dbName.equalsIgnoreCase(DB_MYSQL)) { + return new MySqlCommandParser(); + } else if (dbName.equalsIgnoreCase(DB_POSTGRACE)) { + return new PostgresCommandParser(); + } else if (dbName.equalsIgnoreCase(DB_ORACLE)) { + return new OracleCommandParser(); + } else { + throw new IllegalArgumentException("Unknown dbType " + dbName); + } + } +} diff --git beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java new file mode 100644 index 0000000..2594ce8 --- /dev/null +++ beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java @@ -0,0 +1,494 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hive.beeline; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintStream; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.IllegalFormatException; +import java.util.List; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.OptionBuilder; +import org.apache.commons.cli.OptionGroup; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.output.NullOutputStream; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.HiveMetaException; +import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hive.beeline.HiveSchemaHelper.NestedScriptParser; + +public class HiveSchemaTool { + private String userName = null; + private String passWord = null; + private boolean dryRun = false; + private boolean verbose = false; + private final HiveConf hiveConf; + private final String dbType; + private final MetaStoreSchemaInfo metaStoreSchemaInfo; + + public HiveSchemaTool(String dbType) throws HiveMetaException { + this(System.getenv("HIVE_HOME"), new HiveConf(HiveSchemaTool.class), dbType); + } + + public HiveSchemaTool(String hiveHome, HiveConf hiveConf, String dbType) + throws HiveMetaException { + if (hiveHome == null || hiveHome.isEmpty()) { + throw new HiveMetaException("No Hive home directory provided"); + } + this.hiveConf = hiveConf; + 
this.dbType = dbType; + this.metaStoreSchemaInfo = new MetaStoreSchemaInfo(hiveHome, hiveConf, dbType); + userName = hiveConf.get(ConfVars.METASTORE_CONNECTION_USER_NAME.varname); + passWord = hiveConf.get(HiveConf.ConfVars.METASTOREPWD.varname); + } + + public HiveConf getHiveConf() { + return hiveConf; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public void setPassWord(String passWord) { + this.passWord = passWord; + } + + public void setDryRun(boolean dryRun) { + this.dryRun = dryRun; + } + + public void setVerbose(boolean verbose) { + this.verbose = verbose; + } + + private static void printAndExit(Options cmdLineOptions) { + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("schemaTool", cmdLineOptions); + System.exit(1); + } + + /*** + * Print Hive version and schema version + * @throws MetaException + */ + public void showInfo() throws HiveMetaException { + Connection metastoreConn = getConnectionToMetastore(true); + System.out.println("Hive distribution version:\t " + + MetaStoreSchemaInfo.getHiveSchemaVersion()); + System.out.println("Metastore schema version:\t " + + getMetaStoreSchemaVersion(metastoreConn)); + } + + // read schema version from metastore + private String getMetaStoreSchemaVersion(Connection metastoreConn) + throws HiveMetaException { + String versionQuery = "select t.SCHEMA_VERSION from VERSION t"; + try { + Statement stmt = metastoreConn.createStatement(); + ResultSet res = stmt.executeQuery(versionQuery); + if (!res.next()) { + throw new HiveMetaException("Didn't find version data in metastore"); + } + String currentSchemaVersion = res.getString(1); + metastoreConn.close(); + return currentSchemaVersion; + } catch (SQLException e) { + throw new HiveMetaException("Failed to get schema version.", e); + } + } + + // test the connection metastore using the config property + private void testConnectionToMetastore() throws HiveMetaException { + Connection conn = 
getConnectionToMetastore(true); + try { + conn.close(); + } catch (SQLException e) { + throw new HiveMetaException("Failed to close metastore connection", e); + } + } + + /*** + * get JDBC connection to metastore db + * + * @param printInfo print connection parameters + * @return + * @throws MetaException + */ + private Connection getConnectionToMetastore(boolean printInfo) + throws HiveMetaException { + try { + String connectionURL = getValidConfVar(ConfVars.METASTORECONNECTURLKEY); + String driver = getValidConfVar(ConfVars.METASTORE_CONNECTION_DRIVER); + if (printInfo) { + System.out.println("Metastore connection URL:\t " + connectionURL); + System.out.println("Metastore Connection Driver :\t " + driver); + System.out.println("Metastore connection User:\t " + userName); + } + if ((userName == null) || userName.isEmpty()) { + throw new HiveMetaException("UserName empty "); + } + + // load required JDBC driver + Class.forName(driver); + + // Connect using the JDBC URL and user/pass from conf + return DriverManager.getConnection(connectionURL, userName, passWord); + } catch (IOException e) { + throw new HiveMetaException("Failed to get schema version.", e); + } catch (SQLException e) { + throw new HiveMetaException("Failed to get schema version.", e); + } catch (ClassNotFoundException e) { + throw new HiveMetaException("Failed to load driver", e); + } + } + + /** + * check if the current schema version in metastore matches the Hive version + * @throws MetaException + */ + public void verifySchemaVersion() throws HiveMetaException { + // don't check version if its a dry run + if (dryRun) { + return; + } + String newSchemaVersion = + getMetaStoreSchemaVersion(getConnectionToMetastore(false)); + // verify that the new version is added to schema + if (!MetaStoreSchemaInfo.getHiveSchemaVersion().equalsIgnoreCase(newSchemaVersion)) { + throw new HiveMetaException("Found unexpected schema version " + newSchemaVersion); + } + } + + /** + * Perform metastore schema upgrade. 
extract the current schema version from metastore + * @throws MetaException + */ + public void doUpgrade() throws HiveMetaException { + String fromVersion = getMetaStoreSchemaVersion(getConnectionToMetastore(false)); + if (fromVersion == null || fromVersion.isEmpty()) { + throw new HiveMetaException("Schema version not stored in the metastore. " + + "Metastore schema is too old or corrupt. Try specifying the version manually"); + } + doUpgrade(fromVersion); + } + + /** + * Perform metastore schema upgrade + * + * @param fromSchemaVer + * Existing version of the metastore. If null, then read from the metastore + * @throws MetaException + */ + public void doUpgrade(String fromSchemaVer) throws HiveMetaException { + if (MetaStoreSchemaInfo.getHiveSchemaVersion().equals(fromSchemaVer)) { + System.out.println("No schema upgrade required from version " + fromSchemaVer); + return; + } + // Find the list of scripts to execute for this upgrade + List upgradeScripts = + metaStoreSchemaInfo.getUpgradeScripts(fromSchemaVer); + testConnectionToMetastore(); + System.out.println("Starting upgrade metastore schema from version " + + fromSchemaVer + " to " + MetaStoreSchemaInfo.getHiveSchemaVersion()); + String scriptDir = metaStoreSchemaInfo.getMetaStoreScriptDir(); + try { + for (String scriptFile : upgradeScripts) { + System.out.println("Upgrade script " + scriptFile); + if (!dryRun) { + runBeeLine(scriptDir, scriptFile); + System.out.println("Completed " + scriptFile); + } + } + } catch (IOException eIO) { + throw new HiveMetaException( + "Upgrade FAILED! 
Metastore state would be inconsistent !!", eIO); + } + + // Revalidated the new version after upgrade + verifySchemaVersion(); + } + + /** + * Initialize the metastore schema to current version + * + * @throws MetaException + */ + public void doInit() throws HiveMetaException { + doInit(MetaStoreSchemaInfo.getHiveSchemaVersion()); + + // Revalidated the new version after upgrade + verifySchemaVersion(); + } + + /** + * Initialize the metastore schema + * + * @param toVersion + * If null then current hive version is used + * @throws MetaException + */ + public void doInit(String toVersion) throws HiveMetaException { + testConnectionToMetastore(); + System.out.println("Starting metastore schema initialization to " + toVersion); + + String initScriptDir = metaStoreSchemaInfo.getMetaStoreScriptDir(); + String initScriptFile = metaStoreSchemaInfo.generateInitFileName(toVersion); + + try { + System.out.println("Initialization script " + initScriptFile); + if (!dryRun) { + runBeeLine(initScriptDir, initScriptFile); + System.out.println("Initialization script completed"); + } + } catch (IOException e) { + throw new HiveMetaException("Schema initialization FAILED!" 
+ + " Metastore state would be inconsistent !!", e); + } + } + + // Flatten the nested upgrade script into a buffer + public static String buildCommand(NestedScriptParser dbCommandParser, + String scriptDir, String scriptFile) throws IllegalFormatException, IOException { + + BufferedReader bfReader = + new BufferedReader(new FileReader(scriptDir + File.separatorChar + scriptFile)); + String currLine; + StringBuilder sb = new StringBuilder(); + while ((currLine = bfReader.readLine()) != null) { + if (currLine.trim().isEmpty()) { + continue; // skip empty lines + } + String currScript = dbCommandParser.getScriptName(currLine); + // if this is a nested sql script then flatten it + if (currScript != null) { + sb.append(buildCommand(dbCommandParser, scriptDir, currScript)); + } else { + // write the line to buffer + sb.append(currLine.trim()); + if (isComment(currLine) || currLine.trim().endsWith(";")) { + sb.append(System.getProperty("line.separator")); + } else { + sb.append(" "); + } + } + } + bfReader.close(); + return sb.toString(); + } + + // check if the given line is a comment, starts with -- or # + private static boolean isComment(String sqlLine) { + return (sqlLine.trim().startsWith("--") || sqlLine.trim().startsWith("#")); + } + + // run beeline on the given metastore scrip, flatten the nested scripts into single file + private void runBeeLine(String scriptDir, String scriptFile) throws IOException { + NestedScriptParser dbCommandParser = + HiveSchemaHelper.getDbCommandParser(dbType); + // expand the nested script + String sqlCommands = buildCommand(dbCommandParser, scriptDir, scriptFile); + File tmpFile = File.createTempFile("schematool", ".sql"); + tmpFile.deleteOnExit(); + + // write out the buffer into a file. 
Add beeline commands for autocommit and close + FileWriter fstream = new FileWriter(tmpFile.getPath()); + BufferedWriter out = new BufferedWriter(fstream); + out.write("!autocommit on" + System.getProperty("line.separator")); + out.write(sqlCommands); + out.write("!closeall" + System.getProperty("line.separator")); + out.close(); + runBeeLine(tmpFile.getPath()); + } + + // Generate the beeline args per hive conf and execute the given script + public void runBeeLine(String sqlScriptFile) throws IOException { + List argList = new ArrayList(); + argList.add("-u"); + argList.add(getValidConfVar(ConfVars.METASTORECONNECTURLKEY)); + argList.add("-d"); + argList.add(getValidConfVar(ConfVars.METASTORE_CONNECTION_DRIVER)); + argList.add("-n"); + argList.add(userName); + argList.add("-p"); + argList.add(passWord); + argList.add("-f"); + argList.add(sqlScriptFile); + + // run the script using Beeline + BeeLine beeLine = new BeeLine(); + if (!verbose) { + beeLine.setOutputStream(new PrintStream(new NullOutputStream())); + beeLine.getOpts().setSilent(true); + } + int status = beeLine.begin(argList.toArray(new String[0]), null); + if (status != 0) { + throw new IOException("Schema script failed, errorcode " + status); + } + } + + private String getValidConfVar(ConfVars confVar) throws IOException { + String confVarStr = hiveConf.get(confVar.varname); + if (confVarStr == null || confVarStr.isEmpty()) { + throw new IOException("Empty " + confVar.varname); + } + return confVarStr; + } + + // Create the required command line options + @SuppressWarnings("static-access") + private static void initOptions(Options cmdLineOptions) { + Option help = new Option("help", "print this message"); + Option upgradeOpt = new Option("upgradeSchema", "Schema upgrade"); + Option upgradeFromOpt = OptionBuilder.withArgName("upgradeFrom").hasArg(). + withDescription("Schema upgrade from a version"). 
+ create("upgradeSchemaFrom"); + Option initOpt = new Option("initSchema", "Schema initialization"); + Option initToOpt = OptionBuilder.withArgName("initTo").hasArg(). + withDescription("Schema initialization to a version"). + create("initSchemaTo"); + Option infoOpt = new Option("info", "Show config and schema details"); + + OptionGroup optGroup = new OptionGroup(); + optGroup.addOption(upgradeOpt).addOption(initOpt). + addOption(help).addOption(upgradeFromOpt). + addOption(initToOpt).addOption(infoOpt); + optGroup.setRequired(true); + + Option userNameOpt = OptionBuilder.withArgName("user") + .hasArgs() + .withDescription("Override config file user name") + .create("userName"); + Option passwdOpt = OptionBuilder.withArgName("password") + .hasArgs() + .withDescription("Override config file password") + .create("passWord"); + Option dbTypeOpt = OptionBuilder.withArgName("databaseType") + .hasArgs().withDescription("Metastore database type") + .create("dbType"); + Option dryRunOpt = new Option("dryRun", "list SQL scripts (no execute)"); + Option verboseOpt = new Option("verbose", "only print SQL statements"); + + cmdLineOptions.addOption(help); + cmdLineOptions.addOption(dryRunOpt); + cmdLineOptions.addOption(userNameOpt); + cmdLineOptions.addOption(passwdOpt); + cmdLineOptions.addOption(dbTypeOpt); + cmdLineOptions.addOption(verboseOpt); + cmdLineOptions.addOptionGroup(optGroup); + } + + public static void main(String[] args) { + CommandLineParser parser = new GnuParser(); + CommandLine line = null; + String dbType = null; + String schemaVer = null; + Options cmdLineOptions = new Options(); + + // Argument handling + initOptions(cmdLineOptions); + try { + line = parser.parse(cmdLineOptions, args); + } catch (ParseException e) { + System.err.println("HiveSchemaTool:Parsing failed. 
Reason: " + e.getLocalizedMessage()); + printAndExit(cmdLineOptions); + } + + if (line.hasOption("help")) { + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("schemaTool", cmdLineOptions); + return; + } + + if (line.hasOption("dbType")) { + dbType = line.getOptionValue("dbType"); + if ((!dbType.equalsIgnoreCase(HiveSchemaHelper.DB_DERBY) && + !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MYSQL) && + !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_POSTGRACE) && !dbType + .equalsIgnoreCase(HiveSchemaHelper.DB_ORACLE))) { + System.err.println("Unsupported dbType " + dbType); + printAndExit(cmdLineOptions); + } + } else { + System.err.println("no dbType supplied"); + printAndExit(cmdLineOptions); + } + + System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "true"); + try { + HiveSchemaTool schemaTool = new HiveSchemaTool(dbType); + + if (line.hasOption("userName")) { + schemaTool.setUserName(line.getOptionValue("userName")); + } + if (line.hasOption("passWord")) { + schemaTool.setPassWord(line.getOptionValue("passWord")); + } + if (line.hasOption("dryRun")) { + schemaTool.setDryRun(true); + } + if (line.hasOption("verbose")) { + schemaTool.setVerbose(true); + } + + if (line.hasOption("info")) { + schemaTool.showInfo(); + } else if (line.hasOption("upgradeSchema")) { + schemaTool.doUpgrade(); + } else if (line.hasOption("upgradeSchemaFrom")) { + schemaVer = line.getOptionValue("upgradeSchemaFrom"); + schemaTool.doUpgrade(schemaVer); + } else if (line.hasOption("initSchema")) { + schemaTool.doInit(); + } else if (line.hasOption("initSchemaTo")) { + schemaVer = line.getOptionValue("initSchemaTo"); + schemaTool.doInit(schemaVer); + } else { + System.err.println("no valid option supplied"); + printAndExit(cmdLineOptions); + } + } catch (HiveMetaException e) { + System.err.println(e); + if (line.hasOption("verbose")) { + e.printStackTrace(); + } + System.err.println("*** schemaTool failed ***"); + System.exit(1); + } + 
System.out.println("schemaTool completed"); + + } +} diff --git beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java new file mode 100644 index 0000000..6a96d77 --- /dev/null +++ beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java @@ -0,0 +1,209 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hive.beeline.src.test; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Random; + +import junit.framework.TestCase; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaException; +import org.apache.hive.beeline.HiveSchemaHelper; +import org.apache.hive.beeline.HiveSchemaTool; + +public class TestSchemaTool extends TestCase { + private HiveSchemaTool schemaTool; + private HiveConf hiveConf; + private String testMetastoreDB; + + @Override + protected void setUp() throws Exception { + super.setUp(); + testMetastoreDB = System.getProperty("java.io.tmpdir") + + File.separator + "test_metastore-" + new Random().nextInt(); + System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, + "jdbc:derby:" + testMetastoreDB + ";create=true"); + hiveConf = new HiveConf(this.getClass()); + schemaTool = new HiveSchemaTool(System.getProperty("hive.home"), hiveConf, "derby"); + System.setProperty("beeLine.system.exit", "true"); + } + + @Override + protected void tearDown() throws Exception { + File metaStoreDir = new File(testMetastoreDB); + if (metaStoreDir.exists()) { + FileUtils.deleteDirectory(metaStoreDir); + } + } + /** + * Test dryrun of schema initialization + * @throws Exception + */ + public void testSchemaInitDryRun() throws Exception { + schemaTool.setDryRun(true); + schemaTool.doInit("0.7.0"); + schemaTool.setDryRun(false); + try { + schemaTool.verifySchemaVersion(); + } catch (HiveMetaException e) { + // The connection should fail since it the dry run + return; + } + fail("Dry run shouldn't create actual metastore"); + } + + /** + * Test dryrun of schema upgrade + * @throws Exception + */ + public void testSchemaUpgradeDryRun() throws Exception { + schemaTool.doInit("0.7.0"); + + schemaTool.setDryRun(true); + 
schemaTool.doUpgrade("0.7.0"); + schemaTool.setDryRun(false); + try { + schemaTool.verifySchemaVersion(); + } catch (HiveMetaException e) { + // The connection should fail since it the dry run + return; + } + fail("Dry run shouldn't upgrade metastore schema"); + } + + /** + * Test schema initialization + * @throws Exception + */ + public void testSchemaInit() throws Exception { + schemaTool.doInit("0.12.0"); + } + + /** + * Test schema upgrade + * @throws Exception + */ + public void testSchemaUpgrade() throws Exception { + boolean foundException = false; + // Initialize 0.7.0 schema + schemaTool.doInit("0.7.0"); + // verify that driver fails due to older version schema + try { + schemaTool.verifySchemaVersion(); + } catch (HiveMetaException e) { + // Expected to fail due to old schema + foundException = true; + } + if (!foundException) { + throw new Exception("Hive operations shouldn't pass with older version schema"); + } + + // upgrade schema from 0.7.0 to latest + schemaTool.doUpgrade("0.7.0"); + // verify that driver works fine with latest schema + schemaTool.verifySchemaVersion(); + } + + /** + * Test script formatting + * @throws Exception + */ + public void testScripts() throws Exception { + String testScript[] = { + "-- this is a comment", + "DROP TABLE IF EXISTS fooTab;", + "/*!1234 this is comment code like mysql */;", + "CREATE TABLE fooTab(id INTEGER);", + "DROP TABLE footab;", + "-- ending comment" + }; + String expectedSQL = StringUtils.join(testScript, System.getProperty("line.separator")) + + System.getProperty("line.separator"); + File testScriptFile = generateTestScript(testScript); + String flattenedSql = HiveSchemaTool.buildCommand( + HiveSchemaHelper.getDbCommandParser("derby"), + testScriptFile.getParentFile().getPath(), testScriptFile.getName()); + + assertEquals(expectedSQL, flattenedSql); + } + + /** + * Test nested script formatting + * @throws Exception + */ + public void testNestedScriptsForDerby() throws Exception { + String childTab1 
= "childTab1"; + String childTab2 = "childTab2"; + String parentTab = "fooTab"; + + String childTestScript1[] = { + "/* this is a comment code */", + "DROP TABLE IF EXISTS " + childTab1 + ";", + "CREATE TABLE " + childTab1 + "(id INTEGER);", + "DROP TABLE " + childTab1 + ";" + }; + String childTestScript2[] = { + "/* this is a comment code */;", + "DROP TABLE IF EXISTS " + childTab2 + ";", + "CREATE TABLE " + childTab2 + "(id INTEGER);", + "-- this is a comment", + "DROP TABLE " + childTab2 + ";" + }; + + String parentTestScript[] = { + " -- this is a comment", + "DROP TABLE IF EXISTS " + parentTab + ";", + " /* this is comment code */;", + "CREATE TABLE " + parentTab + "(id INTEGER);", + "RUN '" + generateTestScript(childTestScript1).getName() + "';", + "DROP TABLE " + parentTab + ";", + "RUN '" + generateTestScript(childTestScript2).getName() + "';", + "--ending comment ", + }; + + File testScriptFile = generateTestScript(parentTestScript); + String flattenedSql = HiveSchemaTool.buildCommand( + HiveSchemaHelper.getDbCommandParser("derby"), + testScriptFile.getParentFile().getPath(), testScriptFile.getName()); + assertFalse(flattenedSql.contains("RUN")); + assertTrue(flattenedSql.contains("comment")); + assertTrue(flattenedSql.contains(childTab1)); + assertTrue(flattenedSql.contains(childTab2)); + assertTrue(flattenedSql.contains(parentTab)); + } + + private File generateTestScript(String [] stmts) throws IOException { + File testScriptFile = File.createTempFile("schematest", ".sql"); + testScriptFile.deleteOnExit(); + FileWriter fstream = new FileWriter(testScriptFile.getPath()); + BufferedWriter out = new BufferedWriter(fstream); + for (String line: stmts) { + out.write(line); + out.newLine(); + } + out.close(); + return testScriptFile; + } +} diff --git bin/ext/schemaTool.sh bin/ext/schemaTool.sh new file mode 100644 index 0000000..885eff5 --- /dev/null +++ bin/ext/schemaTool.sh @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one or 
more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +THISSERVICE=schemaTool +export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} " + +schemaTool() { + + CLASS=org.apache.hive.beeline.HiveSchemaTool + execHiveCmd $CLASS "$@" +} + +schemaTool_help () { + CLASS=org.apache.hive.beeline.HiveSchemaTool + execHiveCmd $CLASS "--help" +} diff --git bin/schematool bin/schematool new file mode 100644 index 0000000..913800e --- /dev/null +++ bin/schematool @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +bin=`dirname "$0"` +bin=`cd "$bin"; pwd` + +. 
"$bin"/hive --service schemaTool "$@" diff --git build-common.xml build-common.xml index ad5ac23..e54415a 100644 --- build-common.xml +++ build-common.xml @@ -471,6 +471,7 @@ + diff --git build.xml build.xml index 3e87163..7ec73ef 100644 --- build.xml +++ build.xml @@ -449,6 +449,7 @@ +