diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 22bb22d..71c94e4 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -413,6 +413,8 @@
     HIVEADDEDJARS("hive.added.jars.path", ""),
     HIVEADDEDARCHIVES("hive.added.archives.path", ""),
 
+    HIVE_CURRENT_DATABASE("hive.current.database", ""), // internal usage only
+
     // for hive script operator
     HIVES_AUTO_PROGRESS_TIMEOUT("hive.auto.progress.timeout", 0),
     HIVETABLENAME("hive.table.name", ""),
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 48b7ee1..30b4076 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -283,6 +283,8 @@
     registerUDF("^", UDFOPBitXor.class, true);
     registerUDF("~", UDFOPBitNot.class, true);
 
+    registerGenericUDF("current_database", UDFCurrentDB.class);
+
     registerGenericUDF("isnull", GenericUDFOPNull.class);
     registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
index 1bfcee6..083d574 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
@@ -50,9 +50,13 @@ public void setSignature(ArrayList<ColumnInfo> signature) {
 
   @Override
   public String toString() {
-    StringBuilder sb = new StringBuilder('(');
+    StringBuilder sb = new StringBuilder();
+    sb.append('(');
     if (signature != null) {
-      for (ColumnInfo col : signature) {
+      for (ColumnInfo col: signature) {
+        if (sb.length() > 1) {
+          sb.append(',');
+        }
         sb.append(col.toString());
       }
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 54e1dd8..551b742 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -31,6 +31,8 @@
 import java.util.List;
 import java.util.Properties;
 
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -547,7 +549,7 @@ private void handleSampling(DriverContext context, MapWork mWork, JobConf job, H
   protected void setInputAttributes(Configuration conf) {
     MapWork mWork = work.getMapWork();
     if (mWork.getInputformat() != null) {
-      HiveConf.setVar(conf, HiveConf.ConfVars.HIVEINPUTFORMAT, mWork.getInputformat());
+      HiveConf.setVar(conf, ConfVars.HIVEINPUTFORMAT, mWork.getInputformat());
     }
     if (mWork.getIndexIntermediateFile() != null) {
       conf.set("hive.index.compact.file", mWork.getIndexIntermediateFile());
@@ -556,6 +558,18 @@ protected void setInputAttributes(Configuration conf) {
 
     // Intentionally overwrites anything the user may have put here
     conf.setBoolean("hive.input.format.sorted", mWork.isInputFormatSorted());
+
+    if (HiveConf.getVar(conf, ConfVars.HIVE_CURRENT_DATABASE, null) == null) {
+      HiveConf.setVar(conf, ConfVars.HIVE_CURRENT_DATABASE, getCurrentDB());
+    }
+  }
+
+  public static String getCurrentDB() {
+    String currentDB = null;
+    if (SessionState.get() != null) {
+      currentDB = SessionState.get().getCurrentDatabase();
+    }
+    return currentDB == null ? DEFAULT_DATABASE_NAME : currentDB;
   }
 
   public boolean mapStarted() {
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
new file mode 100644
index 0000000..fd60fed
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobConfigurable;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * NullRowsInputFormat outputs null rows, maximum 100.
+ */
+public class NullRowsInputFormat implements InputFormat<NullWritable, NullWritable>,
+    JobConfigurable {
+
+  static final int MAX_ROW = 100; // to prevent infinite loop
+  static final Log LOG = LogFactory.getLog(NullRowsRecordReader.class.getName());
+
+  public static class DummyInputSplit implements InputSplit {
+    public DummyInputSplit() {
+    }
+
+    @Override
+    public long getLength() throws IOException {
+      return 1;
+    }
+
+    @Override
+    public String[] getLocations() throws IOException {
+      return new String[0];
+    }
+
+    @Override
+    public void readFields(DataInput arg0) throws IOException {
+    }
+
+    @Override
+    public void write(DataOutput arg0) throws IOException {
+    }
+
+  }
+
+  public static class NullRowsRecordReader implements RecordReader<NullWritable, NullWritable> {
+
+    private int counter;
+
+    public NullRowsRecordReader() {
+    }
+
+    @Override
+    public void close() throws IOException {
+    }
+
+    @Override
+    public NullWritable createKey() {
+      return NullWritable.get();
+    }
+
+    @Override
+    public NullWritable createValue() {
+      return NullWritable.get();
+    }
+
+    @Override
+    public long getPos() throws IOException {
+      return counter;
+    }
+
+    @Override
+    public float getProgress() throws IOException {
+      return (float)counter / MAX_ROW;
+    }
+
+    @Override
+    public boolean next(NullWritable arg0, NullWritable arg1) throws IOException {
+      if (counter++ < MAX_ROW) {
+        return true;
+      }
+      return false;
+    }
+  }
+
+  @Override
+  public RecordReader<NullWritable, NullWritable> getRecordReader(InputSplit arg0,
+      JobConf arg1, Reporter arg2) throws IOException {
+    return new NullRowsRecordReader();
+  }
+
+  @Override
+  public InputSplit[] getSplits(JobConf arg0, int arg1) throws IOException {
+    InputSplit[] ret = new InputSplit[1];
+    ret[0] = new DummyInputSplit();
+    LOG.info("Calculating splits");
+    return ret;
+  }
+
+  @Override
+  public void configure(JobConf job) {
+    LOG.info("Using null rows input format");
+  }
+
+}
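
For context, a minimal sketch (not part of the patch; it only uses the classes defined in NullRowsInputFormat.java above) of how a caller drives the reader: next() keeps returning true until the internal counter reaches MAX_ROW, which is what bounds the scan at 100 null rows.

    NullRowsInputFormat.NullRowsRecordReader reader =
        new NullRowsInputFormat.NullRowsRecordReader();
    NullWritable key = reader.createKey();
    NullWritable value = reader.createValue();
    int rows = 0;
    while (reader.next(key, value)) {  // returns false once the counter reaches MAX_ROW (100)
      rows++;
    }
    reader.close();
    // rows == 100 here; OneNullRowInputFormat (modified below) narrows this to a single row.
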
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java
index 8ce1c15..91a6d5a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java
@@ -18,19 +18,11 @@
 
 package org.apache.hadoop.hive.ql.io;
 
-import java.io.DataInput;
-import java.io.DataOutput;
 import java.io.IOException;
-import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobConfigurable;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 
@@ -39,95 +31,34 @@
  * metadata only queries.
  *
  */
-public class OneNullRowInputFormat implements
-    InputFormat<NullWritable, NullWritable>, JobConfigurable {
-  private static final Log LOG = LogFactory.getLog(OneNullRowInputFormat.class
-      .getName());
-  MapredWork mrwork = null;
-  List partitions;
-  long len;
-
-  static public class DummyInputSplit implements InputSplit {
-    public DummyInputSplit() {
-    }
-
-    @Override
-    public long getLength() throws IOException {
-      return 1;
-    }
-
-    @Override
-    public String[] getLocations() throws IOException {
-      return new String[0];
-    }
-
-    @Override
-    public void readFields(DataInput arg0) throws IOException {
-    }
-
-    @Override
-    public void write(DataOutput arg0) throws IOException {
-    }
+public class OneNullRowInputFormat extends NullRowsInputFormat {
+  @Override
+  public RecordReader<NullWritable, NullWritable> getRecordReader(InputSplit arg0,
+      JobConf arg1, Reporter arg2) throws IOException {
+    return new OneNullRowRecordReader();
   }
 
-  static public class OneNullRowRecordReader implements RecordReader<NullWritable, NullWritable> {
-    private boolean processed = false;
-    public OneNullRowRecordReader() {
-    }
-    @Override
-    public void close() throws IOException {
-    }
-
-    @Override
-    public NullWritable createKey() {
-      return NullWritable.get();
-    }
-
-    @Override
-    public NullWritable createValue() {
-      return NullWritable.get();
-    }
+  public static class OneNullRowRecordReader extends NullRowsRecordReader {
+    private boolean processed;
 
     @Override
     public long getPos() throws IOException {
-      return (processed ? 1 : 0);
+      return processed ? 1 : 0;
     }
 
     @Override
     public float getProgress() throws IOException {
-      return (float) (processed ? 1.0 : 0.0);
+      return processed ? 1.0f : 0f;
    }
 
     @Override
-    public boolean next(NullWritable arg0, NullWritable arg1) throws IOException {
-      if(processed) {
+    public boolean next(NullWritable key, NullWritable value) throws IOException {
+      if (processed) {
         return false;
-      } else {
-        processed = true;
-        return true;
       }
+      processed = true;
+      return true;
     }
-
-  }
-
-  @Override
-  public RecordReader<NullWritable, NullWritable> getRecordReader(InputSplit arg0, JobConf arg1, Reporter arg2)
-      throws IOException {
-    return new OneNullRowRecordReader();
-  }
-
-  @Override
-  public InputSplit[] getSplits(JobConf arg0, int arg1) throws IOException {
-    InputSplit[] ret = new InputSplit[1];
-    ret[0] = new DummyInputSplit();
-    LOG.info("Calculating splits");
-    return ret;
-  }
-
-  @Override
-  public void configure(JobConf job) {
-    LOG.info("Using one null row input format");
-  }
-
-}
\ No newline at end of file
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 17f3552..637f155 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -1973,7 +1973,7 @@ regularBody[boolean topLevel]
 singleSelectStatement
    :
    selectClause
-   fromClause
+   fromClause?
    whereClause?
    groupByClause?
    havingClause?
@@ -1982,7 +1982,7 @@ singleSelectStatement
    distributeByClause?
    sortByClause?
    window_clause?
-   limitClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
+   limitClause? -> ^(TOK_QUERY fromClause? ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
                      selectClause whereClause? groupByClause? havingClause? orderByClause? clusterByClause?
                      distributeByClause? sortByClause? window_clause? limitClause?))
    ;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 719b496..e685a44 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.io.IOException;
 import java.io.Serializable;
-import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -39,6 +39,8 @@
 import org.antlr.runtime.tree.TreeWizard;
 import org.antlr.runtime.tree.TreeWizard.ContextVisitor;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
@@ -72,11 +74,14 @@
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
+import org.apache.hadoop.hive.ql.io.NullRowsInputFormat;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.GraphWalker;
@@ -165,6 +170,7 @@
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.NullStructSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -178,6 +184,7 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.InputFormat;
 
 /**
@@ -187,6 +194,9 @@
  */
 public class SemanticAnalyzer extends BaseSemanticAnalyzer {
+
+  private static final String DUMMY_ALIAS = "_dummy_alias";
+
   private HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
   private HashMap<TableScanOperator, PrunedPartitionList> opToPartList;
   private HashMap<String, Operator<? extends OperatorDesc>> topOps;
@@ -8615,6 +8625,15 @@ public Operator genPlan(QB qb) throws SemanticException {
       aliasToOpInfo.put(alias, op);
     }
 
+    if (aliasToOpInfo.isEmpty()) {
+      qb.getMetaData().setSrcForAlias(DUMMY_ALIAS, getDummyTable());
+      TableScanOperator op = (TableScanOperator) genTablePlan(DUMMY_ALIAS, qb);
+      op.getConf().setRowLimit(1);
+      qb.addAlias(DUMMY_ALIAS);
+      qb.setTabAlias(DUMMY_ALIAS, DUMMY_ALIAS);
+      aliasToOpInfo.put(DUMMY_ALIAS, op);
+    }
+
     Operator srcOpInfo = null;
     Operator lastPTFOp = null;
@@ -8696,6 +8715,37 @@ public Operator genPlan(QB qb) throws SemanticException {
     return bodyOpInfo;
   }
 
+  private Table getDummyTable() throws SemanticException {
+    Path dummyPath = createDummyFile();
+    Table desc = new Table(ExecDriver.getCurrentDB(), DUMMY_ALIAS);
+    desc.getTTable().getSd().setLocation(dummyPath.toString());
+    desc.getTTable().getSd().getSerdeInfo().setSerializationLib(NullStructSerDe.class.getName());
+    desc.setInputFormatClass(NullRowsInputFormat.class);
+    desc.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
+    return desc;
+  }
+
+  // add a dummy file so the dummy table's split is not removed by CombineHiveInputFormat, etc.
+  private Path createDummyFile() throws SemanticException {
+    Path dummyPath = new Path(ctx.getMRScratchDir(), "dummy_path");
+    Path dummyFile = new Path(dummyPath, "dummy_file");
+    FSDataOutputStream fout = null;
+    try {
+      FileSystem fs = dummyFile.getFileSystem(conf);
+      if (fs.exists(dummyFile)) {
+        return dummyPath;
+      }
+      fout = fs.create(dummyFile);
+      fout.write(1);
+      fout.close();
+    } catch (IOException e) {
+      throw new SemanticException(e);
+    } finally {
+      IOUtils.closeStream(fout);
+    }
+    return dummyPath;
+  }
+
   /**
    * Generates the operator DAG needed to implement lateral views and attaches
    * it to the TS operator.
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
new file mode 100644
index 0000000..f2d2685
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
@@ -0,0 +1,46 @@
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
+
+// deterministic in the query range
+@Description(name = "current_database",
+    value = "_FUNC_() - returns currently using database name")
+public class UDFCurrentDB extends GenericUDF {
+
+  private MapredContext context;
+
+  @Override
+  public void configure(MapredContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    String database;
+    if (context != null) {
+      database = context.getJobConf().get("hive.current.database");
+    } else {
+      database = SessionState.get().getCurrentDatabase();
+    }
+    return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
+        TypeInfoFactory.stringTypeInfo, new Text(database));
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    throw new IllegalStateException("never");
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "current_database()";
+  }
+}
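
A minimal sketch (illustrative only, not part of the patch, and assuming SessionState.start() has already been called on the current thread) of why evaluate() can simply throw: initialize() returns a constant ObjectInspector that already carries the database name, so the compiler folds the value into the plan and never invokes evaluate() at runtime.

    // Hypothetical helper, for illustration; the cast works because initialize()
    // returns a writable constant ObjectInspector wrapping a Text value.
    static String currentDbConstant() throws UDFArgumentException {
      UDFCurrentDB udf = new UDFCurrentDB();
      ObjectInspector oi = udf.initialize(new ObjectInspector[0]);
      return ((ConstantObjectInspector) oi).getWritableConstantValue().toString();
    }
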
diff --git ql/src/test/queries/clientpositive/select_dummy_source.q ql/src/test/queries/clientpositive/select_dummy_source.q
new file mode 100644
index 0000000..25a1a81
--- /dev/null
+++ ql/src/test/queries/clientpositive/select_dummy_source.q
@@ -0,0 +1,33 @@
+explain
+select 'a', 100;
+select 'a', 100;
+
+--evaluation
+explain
+select 1 + 1;
+select 1 + 1;
+
+-- explode (not possible for lateral view)
+explain
+select explode(array('a', 'b'));
+select explode(array('a', 'b'));
+
+set hive.fetch.task.conversion=more;
+
+explain
+select 'a', 100;
+select 'a', 100;
+
+explain
+select 1 + 1;
+select 1 + 1;
+
+explain
+select explode(array('a', 'b'));
+select explode(array('a', 'b'));
+
+-- subquery
+explain
+select 2 + 3,x from (select 1 + 2 x) X;
+select 2 + 3,x from (select 1 + 2 x) X;
+
diff --git ql/src/test/queries/clientpositive/udf_current_database.q ql/src/test/queries/clientpositive/udf_current_database.q
new file mode 100644
index 0000000..4aed498
--- /dev/null
+++ ql/src/test/queries/clientpositive/udf_current_database.q
@@ -0,0 +1,12 @@
+DESCRIBE FUNCTION current_database;
+
+explain
+select current_database();
+select current_database();
+
+create database xxx;
+use xxx;
+
+explain
+select current_database();
+select current_database();
diff --git ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out
index d73a0fa..4f7610c 100644
--- ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out
+++ ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out
@@ -27,4 +27,5 @@ POSTHOOK: query: CREATE TABLE part(
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@part
-FAILED: ParseException line 5:46 missing ) at 'order' near 'p_mfgr' in table name
+FAILED: ParseException line 5:46 missing ) at 'order' near 'p_mfgr'
+line 5:61 missing EOF at ')' near 'p_mfgr'
diff --git ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out
index 48139f0..090b621 100644
--- ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out
+++ ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out
@@ -27,4 +27,5 @@ POSTHOOK: query: CREATE TABLE part(
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@part
-FAILED: ParseException line 5:45 missing ) at 'sort' near 'p_mfgr' in table name
+FAILED: ParseException line 5:45 missing ) at 'sort' near 'p_mfgr'
+line 5:59 missing EOF at ')' near 'p_mfgr'
diff --git ql/src/test/results/clientnegative/select_udtf_alias.q.out ql/src/test/results/clientnegative/select_udtf_alias.q.out
index 614a18e..68e918b 100644
--- ql/src/test/results/clientnegative/select_udtf_alias.q.out
+++ ql/src/test/results/clientnegative/select_udtf_alias.q.out
@@ -1 +1 @@
-FAILED: ParseException line 3:49 missing FROM at 'LIMIT' near ')' in table name
+FAILED: SemanticException [Error 10083]: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 1 aliases but got 2
diff --git ql/src/test/results/clientpositive/select_dummy_source.q.out ql/src/test/results/clientpositive/select_dummy_source.q.out
new file mode 100644
index 0000000..de8608f
--- /dev/null
+++ ql/src/test/results/clientpositive/select_dummy_source.q.out
@@ -0,0 +1,295 @@
+PREHOOK: query: explain
+select 'a', 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 'a', 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_alias
+            Row Limit Per Split: 1
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              expressions: 'a' (type: string), 100 (type: int)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select 'a', 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select 'a', 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+a	100
+PREHOOK: query: --evaluation
+explain
+select 1 + 1
+PREHOOK: type: QUERY
+POSTHOOK: query: --evaluation
+explain
+select 1 + 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_alias
+            Row Limit Per Split: 1
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              expressions: (1 + 1) (type: int)
+              outputColumnNames: _col0
+              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select 1 + 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select 1 + 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+2
+PREHOOK: query: -- explode (not possible for lateral view)
+explain
+select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- explode (not possible for lateral view)
+explain
+select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_alias
+            Row Limit Per Split: 1
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              expressions: array('a','b') (type: array<string>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                function name: explode
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+a
+b
+PREHOOK: query: explain
+select 'a', 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 'a', 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_alias
+          Row Limit Per Split: 1
+          Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+          Select Operator
+            expressions: 'a' (type: string), 100 (type: int)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select 'a', 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select 'a', 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+a	100
+PREHOOK: query: explain
+select 1 + 1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 1 + 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_alias
+          Row Limit Per Split: 1
+          Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+          Select Operator
+            expressions: (1 + 1) (type: int)
+            outputColumnNames: _col0
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select 1 + 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select 1 + 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+2
+PREHOOK: query: explain
+select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_alias
+            Row Limit Per Split: 1
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              expressions: array('a','b') (type: array<string>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                function name: explode
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+a
+b
+PREHOOK: query: -- subquery
+explain
+select 2 + 3,x from (select 1 + 2 x) X
+PREHOOK: type: QUERY
+POSTHOOK: query: -- subquery
+explain
+select 2 + 3,x from (select 1 + 2 x) X
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_alias
+            Row Limit Per Split: 1
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              expressions: (2 + 3) (type: int), (1 + 2) (type: int)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select 2 + 3,x from (select 1 + 2 x) X
+PREHOOK: type: QUERY
+PREHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select 2 + 3,x from (select 1 + 2 x) X
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+5	3
diff --git ql/src/test/results/clientpositive/show_functions.q.out ql/src/test/results/clientpositive/show_functions.q.out
index 57c9036..3400ac2 100644
--- ql/src/test/results/clientpositive/show_functions.q.out
+++ ql/src/test/results/clientpositive/show_functions.q.out
@@ -50,6 +50,7 @@ covar_pop
 covar_samp
 create_union
 cume_dist
+current_database
 date_add
 date_sub
 datediff
@@ -218,6 +219,7 @@ covar_pop
 covar_samp
 create_union
 cume_dist
+current_database
 PREHOOK: query: SHOW FUNCTIONS '.*e$'
 PREHOOK: type: SHOWFUNCTIONS
 POSTHOOK: query: SHOW FUNCTIONS '.*e$'
@@ -225,6 +227,7 @@ POSTHOOK: type: SHOWFUNCTIONS
 assert_true
 case
 coalesce
+current_database
 decode
 e
 encode
diff --git ql/src/test/results/clientpositive/udf_current_database.q.out ql/src/test/results/clientpositive/udf_current_database.q.out
new file mode 100644
index 0000000..2f68da2
--- /dev/null
+++ ql/src/test/results/clientpositive/udf_current_database.q.out
@@ -0,0 +1,99 @@
+PREHOOK: query: DESCRIBE FUNCTION current_database
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION current_database
+POSTHOOK: type: DESCFUNCTION
+current_database() - returns currently using database name
+PREHOOK: query: explain
+select current_database()
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select current_database()
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_alias
+            Row Limit Per Split: 1
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              expressions: current_database() (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select current_database()
+PREHOOK: type: QUERY
+PREHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select current_database()
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@_dummy_alias
+#### A masked pattern was here ####
+default
+PREHOOK: query: create database xxx
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database xxx
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use xxx
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use xxx
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: explain
+select current_database()
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select current_database()
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_alias
+            Row Limit Per Split: 1
+            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              expressions: current_database() (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select current_database()
+PREHOOK: type: QUERY
+PREHOOK: Input: xxx@_dummy_alias
+#### A masked pattern was here ####
+POSTHOOK: query: select current_database()
+POSTHOOK: type: QUERY
+POSTHOOK: Input: xxx@_dummy_alias
+#### A masked pattern was here ####
+xxx
diff --git ql/src/test/results/compiler/errors/invalid_select.q.out ql/src/test/results/compiler/errors/invalid_select.q.out
index af165bb..9e468d7 100644
--- ql/src/test/results/compiler/errors/invalid_select.q.out
+++ ql/src/test/results/compiler/errors/invalid_select.q.out
@@ -1,2 +1 @@
-Parse Error: line 3:6 missing FROM at '(' near '(' in subquery source
-line 3:7 cannot recognize input near 'b' ')' 'FROM' in subquery source
\ No newline at end of file
+Parse Error: line 3:6 missing EOF at '(' near 'trim'
\ No newline at end of file