diff --git hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4 hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
index 5ce0e23..66c12e7 100644
--- hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
+++ hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
@@ -219,7 +219,7 @@ declare_temporary_table_item :     // DECLARE TEMPORARY TABLE statement
      ;
 
 create_table_stmt :
-      T_CREATE T_TABLE (T_IF T_NOT T_EXISTS)? table_name create_table_preoptions? create_table_definition
+      T_CREATE T_TABLE (T_IF T_NOT T_EXISTS)? table_name create_table_preoptions? create_table_definition
      ;
 
 create_local_temp_table_stmt :
@@ -264,10 +264,15 @@ create_table_fk_action :
      ;
 
 create_table_preoptions :
-      (T_COMMA create_table_preoptions_item)+
+       create_table_preoptions_item+
+     ;
+
+create_table_preoptions_item :
+       T_COMMA create_table_preoptions_td_item
+     | create_table_options_hive_item
      ;
 
-create_table_preoptions_item :
+create_table_preoptions_td_item :
       T_NO? (T_LOG | T_FALLBACK)
      ;
 
@@ -295,12 +300,13 @@ create_table_options_ora_item :
      ;
 
 create_table_options_db2_item :
-       T_IN ident
+       T_INDEX? T_IN ident
      | T_WITH T_REPLACE
      | T_DISTRIBUTE T_BY T_HASH T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P
-     | T_LOGGED
-     | T_NOT T_LOGGED
+     | T_NOT? T_LOGGED
+     | T_COMPRESS (T_YES | T_NO)
      | T_DEFINITION T_ONLY
+     | T_WITH T_RESTRICT T_ON T_DROP
      ;
 
 create_table_options_td_item :
@@ -310,6 +316,7 @@ create_table_options_td_item :
 
 create_table_options_hive_item :
       create_table_hive_row_format
+     | T_STORED T_AS ident
      ;
 
 create_table_hive_row_format :
@@ -404,9 +411,9 @@ dtype_attr :
      | T_NOT? (T_CASESPECIFIC | T_CS)
      ;
 
-dtype_default :                   // Default clause in variable declaration
+dtype_default :
       T_COLON? T_EQUAL expr
-     | T_DEFAULT expr
+     | T_WITH? T_DEFAULT expr?
      ;
 
 create_database_stmt :
@@ -463,8 +470,8 @@ create_routine_params :
      | T_OPEN_P create_routine_param_item (T_COMMA create_routine_param_item)* T_CLOSE_P
      | {!_input.LT(1).getText().equalsIgnoreCase("IS") &&
        !_input.LT(1).getText().equalsIgnoreCase("AS") &&
-       !(_input.LT(1).getText().equalsIgnoreCase("DYNAMIC") && _input.LT(2).getText().equalsIgnoreCase("RESULT"))
-       }?
+       !(_input.LT(1).getText().equalsIgnoreCase("DYNAMIC") && _input.LT(2).getText().equalsIgnoreCase("RESULT"))
+       }?
       create_routine_param_item (T_COMMA create_routine_param_item)*
      ;
 
@@ -623,7 +630,7 @@ copy_stmt :          // COPY statement
      ;
 
 copy_source :
-       (ident | expr | L_FILE)
+       (file_name | expr)
      ;
 
 copy_target :
@@ -647,13 +654,13 @@ copy_ftp_option :
       T_USER expr
      | T_PWD expr
      | T_DIR (file_name | expr)
-     | T_FILES expr
-     | T_NEW
-     | T_OVERWRITE
-     | T_SUBDIR
-     | T_SESSIONS expr
-     | T_TO T_LOCAL? (file_name | expr)
-     ;
+     | T_FILES expr
+     | T_NEW
+     | T_OVERWRITE
+     | T_SUBDIR
+     | T_SESSIONS expr
+     | T_TO T_LOCAL? (file_name | expr)
+     ;
 
 commit_stmt :          // COMMIT statement
       T_COMMIT T_WORK?
@@ -945,11 +952,11 @@ merge_action :
 delete_stmt :
       T_DELETE T_FROM? table_name delete_alias? (where_clause | T_ALL)?
      ;
-     
+
 delete_alias :
       {!_input.LT(1).getText().equalsIgnoreCase("ALL")}?
       T_AS? ident
-     ;  
+     ;
 
 describe_stmt :
       (T_DESCRIBE | T_DESC) T_TABLE? table_name
@@ -1255,7 +1262,8 @@ non_reserved_words :                  // Tokens that are not reserved words
      | T_COLLECT
      | T_COLLECTION
      | T_COLUMN
-     | T_COMMENT
+     | T_COMMENT
+     | T_COMPRESS
      | T_CONSTANT
      | T_COPY
      | T_COMMIT
@@ -1493,6 +1501,7 @@ non_reserved_words :                  // Tokens that are not reserved words
      | T_STEP
      | T_STDEV
      | T_STORAGE
+     | T_STORED
      | T_STRING
      | T_SUBDIR
      | T_SUBSTRING
@@ -1536,6 +1545,7 @@ non_reserved_words :                  // Tokens that are not reserved words
      | T_WORK
      | T_XACT_ABORT
      | T_XML
+     | T_YES
      ;
 
 // Lexer rules
@@ -1584,6 +1594,7 @@ T_COLUMN          : C O L U M N ;
 T_COMMENT         : C O M M E N T;
 T_CONSTANT        : C O N S T A N T ;
 T_COMMIT          : C O M M I T ;
+T_COMPRESS        : C O M P R E S S ;
 T_CONCAT          : C O N C A T;
 T_CONDITION       : C O N D I T I O N ;
 T_CONSTRAINT      : C O N S T R A I N T ;
@@ -1809,6 +1820,7 @@ T_STATS           : S T A T S ;
 T_STATISTICS      : S T A T I S T I C S ;
 T_STEP            : S T E P ;
 T_STORAGE         : S T O R A G E ;
+T_STORED          : S T O R E D ;
 T_STRING          : S T R I N G ;
 T_SUBDIR          : S U B D I R ;
 T_SUBSTRING       : S U B S T R I N G ;
@@ -1850,6 +1862,7 @@ T_WITHOUT         : W I T H O U T ;
 T_WORK            : W O R K ;
 T_XACT_ABORT      : X A C T '_' A B O R T ;
 T_XML             : X M L ;
+T_YES             : Y E S ;
 
 // Functions with specific syntax
 T_ACTIVITY_COUNT  : A C T I V I T Y '_' C O U N T ;
diff --git hplsql/src/main/java/org/apache/hive/hplsql/Copy.java hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
index cd671eb..67af0a9 100644
--- hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
+++ hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
@@ -274,14 +274,19 @@ else if (sqlInsert) {
   public Integer runFromLocal(HplsqlParser.Copy_from_local_stmtContext ctx) {
     trace(ctx, "COPY FROM LOCAL");
     initFileOptions(ctx.copy_file_option());
-    HashMap> src = new HashMap>();
-    int cnt = ctx.copy_source().size();
-    for (int i = 0; i < cnt; i++) {
-      createLocalFileList(src, evalPop(ctx.copy_source(i)).toString(), null);
-    }
+    HashMap> srcFiles = new HashMap>();
+    String src = evalPop(ctx.copy_source(0)).toString();
     String dest = evalPop(ctx.copy_target()).toString();
+    int srcItems = ctx.copy_source().size();
+    for (int i = 0; i < srcItems; i++) {
+      createLocalFileList(srcFiles, evalPop(ctx.copy_source(i)).toString(), null);
+    }
     if (info) {
-      info(ctx, "Files to copy: " + src.size() + " (" + Utils.formatSizeInBytes(srcSizeInBytes) + ")");
+      info(ctx, "Files to copy: " + srcFiles.size() + " (" + Utils.formatSizeInBytes(srcSizeInBytes) + ")");
+    }
+    if (srcFiles.size() == 0) {
+      exec.setHostCode(2);
+      return 2;
     }
     timer.start();
     File file = new File();
@@ -292,10 +297,10 @@ public Integer runFromLocal(HplsqlParser.Copy_from_local_stmtContext ctx) {
     try {
      fs = file.createFs();
      boolean multi = false;
-      if (src.size() > 1) {
+      if (srcFiles.size() > 1) {
        multi = true;
      }
-      for (Map.Entry> i : src.entrySet()) {
+      for (Map.Entry> i : srcFiles.entrySet()) {
        try {
          Path s = new Path(i.getKey());
          Path d = null;
@@ -305,11 +310,18 @@ public Integer runFromLocal(HplsqlParser.Copy_from_local_stmtContext ctx) {
              d = new Path(dest, s.getName());
            }
            else {
-              d = new Path(dest, relativePath + java.io.File.separator + s.getName());
+              d = new Path(dest, relativePath + Path.SEPARATOR + s.getName());
            }
          }
          else {
-            d = new Path(dest);
+            // Path to file is specified (can be relative), so treat target as a file name (hadoop fs -put behavior)
+            if (srcItems == 1 && i.getKey().endsWith(src)) {
+              d = new Path(dest);
+            }
+            // Source directory is specified, so treat the target as a directory
+            else {
+              d = new Path(dest + Path.SEPARATOR + s.getName());
+            }
          }
          fs.copyFromLocalFile(delete, overwrite, s, d);
          succeed++;
diff --git hplsql/src/main/java/org/apache/hive/hplsql/Exec.java hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
index 67cf2ae..6da4f5b 100644
--- hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
+++ hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
@@ -850,34 +850,38 @@ boolean parseArguments(String[] args) {
   * Include statements from .hplsqlrc and hplsql rc files
   */
  void includeRcFile() {
-    if (includeFile(Conf.DOT_HPLSQLRC)) {
+    if (includeFile(Conf.DOT_HPLSQLRC, false)) {
      dotHplsqlrcExists = true;
    }
    else {
-      if (includeFile(Conf.HPLSQLRC)) {
+      if (includeFile(Conf.HPLSQLRC, false)) {
        hplsqlrcExists = true;
      }
    }
    if (udfRun) {
-      includeFile(Conf.HPLSQL_LOCALS_SQL);
+      includeFile(Conf.HPLSQL_LOCALS_SQL, true);
    }
  }
  
  /**
   * Include statements from a file
   */
-  boolean includeFile(String file) {
+  boolean includeFile(String file, boolean showError) {
    try {
      String content = FileUtils.readFileToString(new java.io.File(file), "UTF-8");
      if (content != null && !content.isEmpty()) {
        if (trace) {
-          trace(null, "INLCUDE CONTENT " + file + " (non-empty)");
+          trace(null, "INCLUDE CONTENT " + file + " (non-empty)");
        }
        new Exec(this).include(content);
        return true;
      }
    }
-    catch (Exception e) {}
+    catch (Exception e) {
+      if (showError) {
+        error(null, "INCLUDE file error: " + e.getMessage());
+      }
+    }
    return false;
  }
@@ -1368,6 +1372,11 @@ public Integer visitCreate_table_options_mssql_item(HplsqlParser.Create_table_op
   }
 
   @Override
+  public Integer visitCreate_table_options_db2_item(HplsqlParser.Create_table_options_db2_itemContext ctx) {
+    return 0;
+  }
+
+  @Override
   public Integer visitCreate_table_options_mysql_item(HplsqlParser.Create_table_options_mysql_itemContext ctx) {
     return exec.stmt.createTableMysqlOptions(ctx);
   }
@@ -1412,11 +1421,11 @@ public Integer visitCreate_function_stmt(HplsqlParser.Create_function_stmtContex
   @Override
   public Integer visitCreate_package_stmt(HplsqlParser.Create_package_stmtContext ctx) {
     String name = ctx.ident(0).getText().toUpperCase();
-    currentPackageDecl = new Package(name, exec);
-    packages.put(name, currentPackageDecl);
+    exec.currentPackageDecl = new Package(name, exec);
+    exec.packages.put(name, exec.currentPackageDecl);
     trace(ctx, "CREATE PACKAGE");
-    currentPackageDecl.createSpecification(ctx);
-    currentPackageDecl = null;
+    exec.currentPackageDecl.createSpecification(ctx);
+    exec.currentPackageDecl = null;
     return 0;
   }
@@ -1426,15 +1435,15 @@ public Integer visitCreate_package_stmt(HplsqlParser.Create_package_stmtContext
   @Override
   public Integer visitCreate_package_body_stmt(HplsqlParser.Create_package_body_stmtContext ctx) {
     String name = ctx.ident(0).getText().toUpperCase();
-    currentPackageDecl = packages.get(name);
-    if (currentPackageDecl == null) {
-      currentPackageDecl = new Package(name, exec);
-      currentPackageDecl.setAllMembersPublic(true);
-      packages.put(name, currentPackageDecl);
+    exec.currentPackageDecl = exec.packages.get(name);
+    if (exec.currentPackageDecl == null) {
+      exec.currentPackageDecl = new Package(name, exec);
+      exec.currentPackageDecl.setAllMembersPublic(true);
+      exec.packages.put(name, exec.currentPackageDecl);
     }
     trace(ctx, "CREATE PACKAGE BODY");
-    currentPackageDecl.createBody(ctx);
-    currentPackageDecl = null;
+    exec.currentPackageDecl.createBody(ctx);
+    exec.currentPackageDecl = null;
     return 0;
   }
@@ -2465,6 +2474,18 @@ public void info(ParserRuleContext ctx, String message) {
      System.err.println(message);
    }
  }
+
+  /**
+   * Error message
+   */
+  public void error(ParserRuleContext ctx, String message) {
+    if (ctx != null) {
+      System.err.println("Ln:" + ctx.getStart().getLine() + " " + message);
+    }
+    else {
+      System.err.println(message);
+    }
+  }
 
  public Stack getStack() {
    return exec.stack;
diff --git hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
index 1f2fc5c..2d0dfd6 100644
--- hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
+++ hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
@@ -78,12 +78,12 @@ Integer run(HplsqlParser.Copy_from_ftp_stmtContext ctx) {
     ftp = openConnection(ctx);
     if (ftp != null) {
       Timer timer = new Timer();
-      long start = timer.start();
+      timer.start();
       if (info) {
         info(ctx, "Retrieving directory listing");
       }
       retrieveFileList(dir);
-      long elapsed = timer.stop();
+      timer.stop();
       if (info) {
         info(ctx, "Files to copy: " + Utils.formatSizeInBytes(ftpSizeInBytes) + ", " + Utils.formatCnt(fileCnt, "file") + ", " + Utils.formatCnt(dirCnt, "subdirectory", "subdirectories") + " scanned (" + timer.format() + ")");
       }
@@ -99,7 +99,7 @@ Integer run(HplsqlParser.Copy_from_ftp_stmtContext ctx) {
   */
  void copyFiles(HplsqlParser.Copy_from_ftp_stmtContext ctx) {
    Timer timer = new Timer();
-    long start = timer.start();
+    timer.start();
    if (fileCnt > 1 && sessions > 1) {
      if (sessions > fileCnt) {
        sessions = fileCnt;
      }
@@ -254,7 +254,12 @@ void retrieveFileList(String dir) {
        if (file.isFile()) {
          if (filePattern == null || Pattern.matches(filePattern, name)) {
            if (dir != null && !dir.isEmpty()) {
-              name = dir + "/" + name;
+              if (dir.endsWith("/")) {
+                name = dir + name;
+              }
+              else {
+                name = dir + "/" + name;
+              }
            }
            if (!newOnly || !isTargetExists(name)) {
              fileCnt++;
@@ -275,7 +280,12 @@ void retrieveFileList(String dir) {
        for (FTPFile d : dirs) {
          String sd = d.getName();
          if (dir != null && !dir.isEmpty()) {
-            sd = dir + "/" + sd;
+            if (dir.endsWith("/")) {
+              sd = dir + sd;
+            }
+            else {
+              sd = dir + "/" + sd;
+            }
          }
          retrieveFileList(sd);
        }
@@ -292,7 +302,7 @@ void retrieveFileList(String dir) {
  FTPClient openConnection(HplsqlParser.Copy_from_ftp_stmtContext ctx) {
    FTPClient ftp = new FTPClient();
    Timer timer = new Timer();
-    long start = timer.start();
+    timer.start();
    try {
      ftp.connect(host);
      ftp.enterLocalPassiveMode();
@@ -304,7 +314,7 @@ FTPClient openConnection(HplsqlParser.Copy_from_ftp_stmtContext ctx) {
      exec.signal(Signal.Type.SQLEXCEPTION, "Cannot login to FTP server: " + host);
      return null;
    }
-    long elapsed = timer.stop();
+    timer.stop();
    if (info) {
      info(ctx, "Connected to ftp: " + host + " (" + timer.format() + ")");
    }
@@ -339,8 +349,20 @@ else if (new File().exists(target)) {
   * Get the target file relative path and name
   */
  String getTargetFileName(String file) {
-    int len = dir.length();
-    return targetDir + file.substring(len);
+    String outFile = file;
+    // Remove source dir from file
+    if (dir != null) {
+      if (targetDir != null) {
+        outFile = targetDir + file.substring(dir.length());
+      }
+      else {
+        outFile = file.substring(dir.length());
+      }
+    }
+    else if (targetDir != null) {
+      outFile = targetDir + "/" + file;
+    }
+    return outFile;
  }
 
  /**
@@ -348,6 +370,8 @@ String getTargetFileName(String file) {
   */
  void initOptions(HplsqlParser.Copy_from_ftp_stmtContext ctx) {
    host = evalPop(ctx.expr()).toString();
+    user = "anonymous";
+    pwd = "";
    int cnt = ctx.copy_ftp_option().size();
    for (int i = 0; i < cnt; i++) {
      HplsqlParser.Copy_ftp_optionContext option = ctx.copy_ftp_option(i);
diff --git hplsql/src/main/java/org/apache/hive/hplsql/Package.java hplsql/src/main/java/org/apache/hive/hplsql/Package.java
index 15be59c..8c422ad 100644
--- hplsql/src/main/java/org/apache/hive/hplsql/Package.java
+++ hplsql/src/main/java/org/apache/hive/hplsql/Package.java
@@ -133,7 +133,7 @@ public boolean execFunc(String name, HplsqlParser.Expr_func_paramsContext ctx) {
   }
 
   /**
-   * Execute rocedure
+   * Execute procedure
   */
  public boolean execProc(String name, HplsqlParser.Expr_func_paramsContext ctx, boolean traceNotExists) {
    Create_procedure_stmtContext p = proc.get(name.toUpperCase());
diff --git hplsql/src/main/java/org/apache/hive/hplsql/Select.java hplsql/src/main/java/org/apache/hive/hplsql/Select.java
index 589e984..403810c 100644
--- hplsql/src/main/java/org/apache/hive/hplsql/Select.java
+++ hplsql/src/main/java/org/apache/hive/hplsql/Select.java
@@ -147,10 +147,10 @@ else if (ctx.parent instanceof HplsqlParser.StmtContext) {
    }
    catch (SQLException e) {
      exec.signal(query);
-      exec.closeQuery(query, exec.conf.defaultConnection);
+      exec.closeQuery(query, conn);
      return 1;
    }
-    exec.closeQuery(query, exec.conf.defaultConnection);
+    exec.closeQuery(query, conn);
    return 0;
  }
diff --git hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
index 17d2195..c044616 100644
--- hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
+++ hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
@@ -133,6 +133,10 @@ public Integer createTable(HplsqlParser.Create_table_stmtContext ctx) {
    exec.append(sql, evalPop(ctx.table_name()).toString(), ctx.T_TABLE().getSymbol(), ctx.table_name().getStart());
    Token last = ctx.table_name().getStop();
    if (ctx.create_table_preoptions() != null) {
+      String preopt = evalPop(ctx.create_table_preoptions()).toString();
+      if (preopt != null) {
+        sql.append(" " + preopt);
+      }
      last = ctx.create_table_preoptions().stop;
    }
    sql.append(createTableDefinition(ctx.create_table_definition(), last));
@@ -167,9 +171,12 @@ String createTableDefinition(HplsqlParser.Create_table_definitionContext ctx, To
      }
      exec.append(sql, ctx.T_CLOSE_P().getText(), last, ctx.T_CLOSE_P().getSymbol());
    }
+    // CREATE TABLE AS SELECT statement
    else {
      exec.append(sql, evalPop(ctx.select_stmt()).toString(), last, ctx.select_stmt().getStart());
-      exec.append(sql, ctx.T_CLOSE_P().getText(), ctx.select_stmt().stop, ctx.T_CLOSE_P().getSymbol());
+      if (ctx.T_CLOSE_P() != null) {
+        exec.append(sql, ctx.T_CLOSE_P().getText(), ctx.select_stmt().stop, ctx.T_CLOSE_P().getSymbol());
+      }
    }
    HplsqlParser.Create_table_optionsContext options = ctx.create_table_options();
    if (options != null) {
@@ -188,6 +195,9 @@ public Integer createTableHiveOptions(HplsqlParser.Create_table_options_hive_ite
    if (ctx.create_table_hive_row_format() != null) {
      createTableHiveRowFormat(ctx.create_table_hive_row_format());
    }
+    else if (ctx.T_STORED() != null) {
+      evalString(exec.getText(ctx));
+    }
    return 0;
  }
@@ -575,7 +585,7 @@ public Integer include(HplsqlParser.Include_stmtContext ctx) {
      file = evalPop(ctx.expr()).toString();
    }
    trace(ctx, "INCLUDE " + file);
-    exec.includeFile(file);
+    exec.includeFile(file, true);
    return 0;
  }
@@ -793,7 +803,7 @@ else if (type == Conn.Type.HIVE && conf.insertValues == Conf.InsertValues.SELECT
      return 1;
    }
    exec.setSqlSuccess();
-    exec.closeQuery(query, exec.conf.defaultConnection);
+    exec.closeQuery(query, conn);
    return 0;
  }
diff --git hplsql/src/main/java/org/apache/hive/hplsql/Utils.java hplsql/src/main/java/org/apache/hive/hplsql/Utils.java
index fb60b22..a768b00 100644
--- hplsql/src/main/java/org/apache/hive/hplsql/Utils.java
+++ hplsql/src/main/java/org/apache/hive/hplsql/Utils.java
@@ -257,7 +257,10 @@ public static String getExecDir() {
   */
  public static String formatSizeInBytes(long bytes, String postfix) {
    String out;
-    if (bytes < 1024) {
+    if (bytes == 1) {
+      out = bytes + " byte";
+    }
+    else if (bytes < 1024) {
      out = bytes + " bytes";
    }
    else if (bytes < 1024 * 1024) {
diff --git hplsql/src/main/java/org/apache/hive/hplsql/Var.java hplsql/src/main/java/org/apache/hive/hplsql/Var.java
index 63a1f43..480d97c 100644
--- hplsql/src/main/java/org/apache/hive/hplsql/Var.java
+++ hplsql/src/main/java/org/apache/hive/hplsql/Var.java
@@ -176,8 +176,16 @@ else if (type == val.type) {
    else if (type == Type.STRING) {
      cast(val.toString());
    }
+    else if (type == Type.BIGINT) {
+      if (val.type == Type.STRING) {
+        value = Long.parseLong((String)val.value);
+      }
+    }
    else if (type == Type.DECIMAL) {
-      if (val.type == Type.BIGINT) {
+      if (val.type == Type.STRING) {
+        value = new BigDecimal((String)val.value);
+      }
+      else if (val.type == Type.BIGINT) {
        value = BigDecimal.valueOf(val.longValue());
      }
      else if (val.type == Type.DOUBLE) {
@@ -185,7 +193,10 @@ else if (val.type == Type.DOUBLE) {
      }
    }
    else if (type == Type.DOUBLE) {
-      if (val.type == Type.BIGINT || val.type == Type.DECIMAL) {
+      if (val.type == Type.STRING) {
+        value = new Double((String)val.value);
+      }
+      else if (val.type == Type.BIGINT || val.type == Type.DECIMAL) {
        value = Double.valueOf(val.doubleValue());
      }
    }
diff --git hplsql/src/main/resources/hplsql-site.xml hplsql/src/main/resources/hplsql-site.xml
index 05fe857..96843dc 100644
--- hplsql/src/main/resources/hplsql-site.xml
+++ hplsql/src/main/resources/hplsql-site.xml
@@ -62,7 +62,7 @@
 
 <property>
   <name>hplsql.dual.table</name>
-  <value>default.dual</value>
+  <value></value>
   <description>Single row, single column table for internal operations</description>
 </property>
 
diff --git hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
index 9b5a956..8692661 100644
--- hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
+++ hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
@@ -67,6 +67,11 @@ public void testCase() throws Exception {
  public void testCast() throws Exception {
    run("cast");
  }
+
+  @Test
+  public void testCast2() throws Exception {
+    run("cast2");
+  }
 
  @Test
  public void testChar() throws Exception {
@@ -112,6 +117,11 @@ public void testCreatePackage() throws Exception {
  public void testCreatePackage2() throws Exception {
    run("create_package2");
  }
+
+  @Test
+  public void testCreatePackage3() throws Exception {
+    run("create_package3");
+  }
 
  @Test
  public void testCreateProcedure() throws Exception {
diff --git hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
index 3e897be..313511d 100644
--- hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
+++ hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
@@ -34,6 +34,11 @@ private final ByteArrayOutputStream out = new ByteArrayOutputStream();
 
  @Test
+  public void testCreateTableDb2() throws Exception {
+    run("create_table_db2");
+  }
+
+  @Test
  public void testCreateTableMssql() throws Exception {
    run("create_table_mssql");
  }
diff --git hplsql/src/test/queries/db/copy_from_ftp.sql hplsql/src/test/queries/db/copy_from_ftp.sql
new file mode 100644
index 0000000..b43515f
--- /dev/null
+++ hplsql/src/test/queries/db/copy_from_ftp.sql
@@ -0,0 +1,2 @@
+copy from ftp 'speedtest.tele2.net' files '1KB.zip';
+copy from ftp 'speedtest.tele2.net' files '512KB.zip' dir '/';
\ No newline at end of file
diff --git hplsql/src/test/queries/db/create_function.sql hplsql/src/test/queries/db/create_function.sql
new file mode 100644
index 0000000..04dc58b
--- /dev/null
+++ hplsql/src/test/queries/db/create_function.sql
@@ -0,0 +1,11 @@
+CREATE FUNCTION get_count()
+ RETURNS STRING
+BEGIN
+ DECLARE cnt INT = 0;
+ SELECT COUNT(*) INTO cnt FROM src
+   WHERE value RLIKE '^[+-]?[0-9]*[.]?[0-9]*$';
+ RETURN cnt;
+END;
+
+-- Call the function
+PRINT get_count();
\ No newline at end of file
diff --git hplsql/src/test/queries/db/create_procedure2.sql hplsql/src/test/queries/db/create_procedure2.sql
new file mode 100644
index 0000000..fd04d13
--- /dev/null
+++ hplsql/src/test/queries/db/create_procedure2.sql
@@ -0,0 +1,17 @@
+CREATE PROCEDURE load_tab(IN name STRING, OUT result STRING)
+BEGIN
+ DROP TABLE IF EXISTS test_tab;
+
+ CREATE TABLE IF NOT EXISTS test_tab
+ STORED AS ORC
+ AS
+ SELECT *
+ FROM src;
+
+ SET result = name;
+
+END;
+
+DECLARE str STRING;
+CALL load_tab('world', str);
+PRINT str;
\ No newline at end of file
diff --git hplsql/src/test/queries/db/map_object2.sql hplsql/src/test/queries/db/map_object2.sql
new file mode 100644
index 0000000..6896826
--- /dev/null
+++ hplsql/src/test/queries/db/map_object2.sql
@@ -0,0 +1,5 @@
+map object version to hive.VERSION at mysqlconn;
+
+select count(*) from version;
+
+select count(*) from src;
\ No newline at end of file
diff --git hplsql/src/test/queries/local/cast2.sql hplsql/src/test/queries/local/cast2.sql
new file mode 100644
index 0000000..d68db6d
--- /dev/null
+++ hplsql/src/test/queries/local/cast2.sql
@@ -0,0 +1,10 @@
+temp_int = CAST('1' AS int);
+print temp_int
+temp_float = CAST('1.2' AS float);
+print temp_float
+temp_double = CAST('1.2' AS double);
+print temp_double
+temp_decimal = CAST('1.2' AS decimal(10, 4));
+print temp_decimal
+temp_string = CAST('1.2' AS string);
+print temp_string
\ No newline at end of file
diff --git hplsql/src/test/queries/local/create_package3.sql hplsql/src/test/queries/local/create_package3.sql
new file mode 100644
index 0000000..467f134
--- /dev/null
+++ hplsql/src/test/queries/local/create_package3.sql
@@ -0,0 +1,2 @@
+include src/test/queries/local/create_package3_include.sql
+a.test();
\ No newline at end of file
diff --git hplsql/src/test/queries/local/create_package3_include.sql hplsql/src/test/queries/local/create_package3_include.sql
new file mode 100644
index 0000000..5bd0701
--- /dev/null
+++ hplsql/src/test/queries/local/create_package3_include.sql
@@ -0,0 +1,11 @@
+create or replace package a as
+procedure test();
+end;
+
+create or replace package body a as
+procedure test()
+is
+begin
+print "test ok";
+end;
+end;
\ No newline at end of file
diff --git hplsql/src/test/queries/offline/create_table_db2.sql hplsql/src/test/queries/offline/create_table_db2.sql
new file mode 100644
index 0000000..ece580a
--- /dev/null
+++ hplsql/src/test/queries/offline/create_table_db2.sql
@@ -0,0 +1,77 @@
+
+------------------------------------------------
+-- DDL Statements for Table "ABCDEF_ABI"
+------------------------------------------------
+
+CREATE TABLE "ABCDEF_ABI" (
+  "ABI_CCANT_POSTN_SK" INT NOT NULL ,
+  "LAT_DATE" DATE NOT NULL ,
+  "LAT_TIME" TIMESTAMP NOT NULL ,
+  "LAT_ACTION" CHAR(2) NOT NULL ,
+  "ROW_ID" VARCHAR(15) NOT NULL ,
+  "CREATED" TIMESTAMP NOT NULL ,
+  "CREATED_BY" VARCHAR(15) NOT NULL ,
+  "LAST_UPD" TIMESTAMP NOT NULL ,
+  "LAST_UPD_BY" VARCHAR(15) NOT NULL ,
+  "MODIFICATION_NUM" DECIMAL(10,0) NOT NULL ,
+  "ASGN_DNRM_FLG" CHAR(1) ,
+  "ROLE_CD" VARCHAR(30) ,
+  "STATUS" VARCHAR(30) ,
"BD_LAST_UPD" TIMESTAMP , + "BD_LAST_UPD_SRC" VARCHAR(50) , + "ABCDEF_CNT" INT NOT NULL , + "ABCDEF_IND" CHAR(1) NOT NULL , + "ABCDEF_TYP_CD" CHAR(1) NOT NULL WITH DEFAULT , + "PRE_LKO_MAPNG_ID" INT NOT NULL WITH DEFAULT 0 , + "CR_BY_MAPNG_ID" INT NOT NULL , + "DW_CR_TMSP" TIMESTAMP NOT NULL , + "WRK_FLOW_RUN_ID" INT NOT NULL ) +COMPRESS YES +WITH RESTRICT ON DROP +DISTRIBUTE BY HASH("ABI_CCANT_POSTN_SK") + IN "WFDTS2_16K" INDEX IN "WFDIX2_16K" ; + + ------------------------------------------------ +-- DDL Statements for Table "ABCDEF_ABI_ACT" +------------------------------------------------ + + +CREATE TABLE "ABCDEF_ABI_ACT" ( + "ABI_ACT_CONTACT_SK" INT NOT NULL , + "LAT_DATE" DATE NOT NULL , + "LAT_TIME" TIMESTAMP NOT NULL , + "LAT_ACTION" CHAR(2) NOT NULL , + "ROW_ID" VARCHAR(15) NOT NULL , + "ABCDEF_IND" CHAR(1) NOT NULL , + "ABCDEF_TYP_CD" CHAR(1) NOT NULL WITH DEFAULT , + "PRE_LKO_MAPNG_ID" INT NOT NULL WITH DEFAULT 0 , + "BD_LAST_UPD_SRC" VARCHAR(50) , + "UPD_BY_MAPNG_ID" INT NOT NULL , + "WRK_FLOW_RUN_ID" INT NOT NULL ) +COMPRESS YES +WITH RESTRICT ON DROP +DISTRIBUTE BY HASH("ABI_ACT_CONTACT_SK") +IN "WFDTS2_16K" INDEX IN "WFDIX2_16K" ; + +------------------------------------------------ +-- DDL Statements for Table "ABCDEF_ABI_EMP" +------------------------------------------------ + +CREATE TABLE "ABCDEF_ABI_EMP" ( + "ABI_ACT_EMP_SK" INT NOT NULL , + "LAT_DATE" DATE NOT NULL , + "LAT_TIME" TIMESTAMP NOT NULL , + "LAT_ACTION" CHAR(2) NOT NULL , + "ABCDEF_CNT" INT NOT NULL , + "ABCDEF_IND" CHAR(1) NOT NULL , + "ABCDEF_TYP_CD" CHAR(1) NOT NULL WITH DEFAULT , + "PRE_LKO_MAPNG_ID" INT NOT NULL WITH DEFAULT 0 , + "DW_CR_TMSP" TIMESTAMP NOT NULL , + "CR_BY_MAPNG_ID" INT NOT NULL , + "DW_UPD_TMSP" TIMESTAMP NOT NULL , + "UPD_BY_MAPNG_ID" INT NOT NULL , + "WRK_FLOW_RUN_ID" INT NOT NULL ) +COMPRESS YES +WITH RESTRICT ON DROP +DISTRIBUTE BY HASH("ABI_ACT_EMP_SK") +IN "WFDTS2_16K" INDEX IN "WFDIX2_16K" ; \ No newline at end of file diff --git hplsql/src/test/results/db/copy_from_ftp.out.txt hplsql/src/test/results/db/copy_from_ftp.out.txt new file mode 100644 index 0000000..09c5b3f --- /dev/null +++ hplsql/src/test/results/db/copy_from_ftp.out.txt @@ -0,0 +1,2 @@ +Ln:1 COPY FROM FTP +Ln:2 COPY FROM FTP \ No newline at end of file diff --git hplsql/src/test/results/db/create_function.out.txt hplsql/src/test/results/db/create_function.out.txt new file mode 100644 index 0000000..cacf95b --- /dev/null +++ hplsql/src/test/results/db/create_function.out.txt @@ -0,0 +1,13 @@ +Ln:1 CREATE FUNCTION get_count +Ln:11 PRINT +EXEC FUNCTION get_count +Ln:4 DECLARE cnt INT = 0 +Ln:5 SELECT +Ln:5 SELECT COUNT(*) FROM src + WHERE value RLIKE '^[+-]?[0-9]*[.]?[0-9]*$' +Ln:5 SELECT completed successfully +Ln:5 SELECT INTO statement executed +Ln:5 COLUMN: _c0, bigint +Ln:5 SET cnt = 0 +Ln:7 RETURN +0 \ No newline at end of file diff --git hplsql/src/test/results/db/create_procedure2.out.txt hplsql/src/test/results/db/create_procedure2.out.txt new file mode 100644 index 0000000..e68d44b --- /dev/null +++ hplsql/src/test/results/db/create_procedure2.out.txt @@ -0,0 +1,15 @@ +Ln:1 CREATE PROCEDURE load_tab +Ln:15 DECLARE str STRING +Ln:16 EXEC PROCEDURE load_tab +Ln:16 SET PARAM name = world +Ln:16 SET PARAM result = null +Ln:3 DROP +Ln:3 DROP TABLE IF EXISTS test_tab +Ln:5 CREATE TABLE +Ln:5 CREATE TABLE IF NOT EXISTS test_tab STORED AS ORC + AS + SELECT * + FROM src +Ln:11 SET result = 'world' +Ln:17 PRINT +world \ No newline at end of file diff --git hplsql/src/test/results/db/map_object2.out.txt 
hplsql/src/test/results/db/map_object2.out.txt new file mode 100644 index 0000000..7b4e832 --- /dev/null +++ hplsql/src/test/results/db/map_object2.out.txt @@ -0,0 +1,11 @@ +Ln:1 MAP OBJECT version AS hive.VERSION AT mysqlconn +Ln:3 SELECT +Ln:3 select count(*) from hive.VERSION +Ln:3 SELECT completed successfully +Ln:3 Standalone SELECT executed: 1 columns in the result set +1 +Ln:5 SELECT +Ln:5 select count(*) from src +Ln:5 SELECT completed successfully +Ln:5 Standalone SELECT executed: 1 columns in the result set +500 \ No newline at end of file diff --git hplsql/src/test/results/local/cast2.out.txt hplsql/src/test/results/local/cast2.out.txt new file mode 100644 index 0000000..5852420 --- /dev/null +++ hplsql/src/test/results/local/cast2.out.txt @@ -0,0 +1,15 @@ +Ln:1 SET temp_int = 1 +Ln:2 PRINT +1 +Ln:3 SET temp_float = 1.2 +Ln:4 PRINT +1.2 +Ln:5 SET temp_double = 1.2 +Ln:6 PRINT +1.2 +Ln:7 SET temp_decimal = 1.2 +Ln:8 PRINT +1.2 +Ln:9 SET temp_string = '1.2' +Ln:10 PRINT +1.2 \ No newline at end of file diff --git hplsql/src/test/results/local/create_package3.out.txt hplsql/src/test/results/local/create_package3.out.txt new file mode 100644 index 0000000..f8cafaa --- /dev/null +++ hplsql/src/test/results/local/create_package3.out.txt @@ -0,0 +1,5 @@ +Ln:1 INCLUDE src/test/queries/local/create_package3_include.sql +INCLUDE CONTENT src/test/queries/local/create_package3_include.sql (non-empty) +EXEC PACKAGE PROCEDURE A.test +Ln:9 PRINT +"test ok" \ No newline at end of file diff --git hplsql/src/test/results/local/include.out.txt hplsql/src/test/results/local/include.out.txt index 86cfa05..9503cd1 100644 --- hplsql/src/test/results/local/include.out.txt +++ hplsql/src/test/results/local/include.out.txt @@ -1,8 +1,8 @@ Ln:1 INCLUDE src/test/queries/local/include_file.sql -INLCUDE CONTENT src/test/queries/local/include_file.sql (non-empty) +INCLUDE CONTENT src/test/queries/local/include_file.sql (non-empty) Ln:1 PRINT file included successfully Ln:2 INCLUDE src/test/queries/local/include_file.sql -INLCUDE CONTENT src/test/queries/local/include_file.sql (non-empty) +INCLUDE CONTENT src/test/queries/local/include_file.sql (non-empty) Ln:1 PRINT file included successfully \ No newline at end of file diff --git hplsql/src/test/results/local/var_scope.out.txt hplsql/src/test/results/local/var_scope.out.txt index 0f53edf..1bc7905 100644 --- hplsql/src/test/results/local/var_scope.out.txt +++ hplsql/src/test/results/local/var_scope.out.txt @@ -1,5 +1,5 @@ Ln:1 INCLUDE src/test/queries/local/var_scope_include.sql -INLCUDE CONTENT src/test/queries/local/var_scope_include.sql (non-empty) +INCLUDE CONTENT src/test/queries/local/var_scope_include.sql (non-empty) Ln:3 DECLARE i int = 3 Ln:5 CREATE PROCEDURE p1 Ln:13 CREATE PROCEDURE p2 diff --git hplsql/src/test/results/offline/create_table_db2.out.txt hplsql/src/test/results/offline/create_table_db2.out.txt new file mode 100644 index 0000000..860e7d0 --- /dev/null +++ hplsql/src/test/results/offline/create_table_db2.out.txt @@ -0,0 +1,52 @@ +Ln:6 CREATE TABLE +Ln:6 CREATE TABLE `ABCDEF_ABI` ( + `ABI_CCANT_POSTN_SK` INT , + `LAT_DATE` DATE , + `LAT_TIME` TIMESTAMP , + `LAT_ACTION` CHAR(2) , + `ROW_ID` VARCHAR(15) , + `CREATED` TIMESTAMP , + `CREATED_BY` VARCHAR(15) , + `LAST_UPD` TIMESTAMP , + `LAST_UPD_BY` VARCHAR(15) , + `MODIFICATION_NUM` DECIMAL(10,0) , + `ASGN_DNRM_FLG` CHAR(1) , + `ROLE_CD` VARCHAR(30) , + `STATUS` VARCHAR(30) , + `BD_LAST_UPD` TIMESTAMP , + `BD_LAST_UPD_SRC` VARCHAR(50) , + `ABCDEF_CNT` INT , + `ABCDEF_IND` CHAR(1) , + 
`ABCDEF_TYP_CD` CHAR(1) , + `PRE_LKO_MAPNG_ID` INT , + `CR_BY_MAPNG_ID` INT , + `DW_CR_TMSP` TIMESTAMP , + `WRK_FLOW_RUN_ID` INT ) +Ln:39 CREATE TABLE +Ln:39 CREATE TABLE `ABCDEF_ABI_ACT` ( + `ABI_ACT_CONTACT_SK` INT , + `LAT_DATE` DATE , + `LAT_TIME` TIMESTAMP , + `LAT_ACTION` CHAR(2) , + `ROW_ID` VARCHAR(15) , + `ABCDEF_IND` CHAR(1) , + `ABCDEF_TYP_CD` CHAR(1) , + `PRE_LKO_MAPNG_ID` INT , + `BD_LAST_UPD_SRC` VARCHAR(50) , + `UPD_BY_MAPNG_ID` INT , + `WRK_FLOW_RUN_ID` INT ) +Ln:60 CREATE TABLE +Ln:60 CREATE TABLE `ABCDEF_ABI_EMP` ( + `ABI_ACT_EMP_SK` INT , + `LAT_DATE` DATE , + `LAT_TIME` TIMESTAMP , + `LAT_ACTION` CHAR(2) , + `ABCDEF_CNT` INT , + `ABCDEF_IND` CHAR(1) , + `ABCDEF_TYP_CD` CHAR(1) , + `PRE_LKO_MAPNG_ID` INT , + `DW_CR_TMSP` TIMESTAMP , + `CR_BY_MAPNG_ID` INT , + `DW_UPD_TMSP` TIMESTAMP , + `UPD_BY_MAPNG_ID` INT , + `WRK_FLOW_RUN_ID` INT ) \ No newline at end of file