diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java
new file mode 100644
index 0000000000..b062aa3ed7
--- /dev/null
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.storage.jdbc;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Arrays;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * DBRecordWritable holds a single row of column values and binds them, in order, to a JDBC PreparedStatement for writing to the underlying database.
+ */
+public class DBRecordWritable implements Writable,
+    org.apache.hadoop.mapreduce.lib.db.DBWritable {
+
+  private Object[] columnValues;
+
+  public DBRecordWritable() {
+  }
+
+  public DBRecordWritable(int numColumns) {
+    this.columnValues = new Object[numColumns];
+  }
+
+  public void clear() {
+    Arrays.fill(columnValues, null);
+  }
+
+  public void set(int i, Object columnObject) {
+    columnValues[i] = columnObject;
+  }
+
+  @Override
+  public void readFields(ResultSet rs) throws SQLException {
+    // do nothing
+  }
+
+  @Override
+  public void write(PreparedStatement statement) throws SQLException {
+    if (columnValues == null) {
+      throw new SQLException("No data available to be written");
+    }
+    for (int i = 0; i < columnValues.length; i++) {
+      statement.setObject(i + 1, columnValues[i]);
+    }
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    // do nothing
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    // do nothing
+  }
+
+}
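A note on the class above: each slot filled via set() is bound positionally with setObject(i + 1, ...), since JDBC parameters are 1-based while the column array is 0-based. A minimal standalone sketch of that mapping, assuming an embedded Derby database purely as a test harness (the class name DBRecordWritableSketch and the table T1 are illustrative, not part of the patch):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import org.apache.hive.storage.jdbc.DBRecordWritable;

public class DBRecordWritableSketch {
  public static void main(String[] args) throws Exception {
    // Assumption: Derby embedded driver on the classpath; in-memory DB for the demo.
    try (Connection conn = DriverManager.getConnection(
            "jdbc:derby:memory:demo;create=true")) {
      conn.createStatement().execute("CREATE TABLE T1 (A INT, B VARCHAR(10))");
      // One slot per target column; set() fills them, write() binds them.
      DBRecordWritable record = new DBRecordWritable(2);
      record.set(0, 42);        // becomes statement.setObject(1, 42)
      record.set(1, "hello");   // becomes statement.setObject(2, "hello")
      try (PreparedStatement ps =
              conn.prepareStatement("INSERT INTO T1 VALUES (?,?)")) {
        record.write(ps);       // binds every slot in order
        ps.executeUpdate();
      }
    }
  }
}
```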
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcOutputFormat.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcOutputFormat.java
index 26fb3cdd09..693b040a84
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcOutputFormat.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcOutputFormat.java
@@ -18,12 +18,15 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hive.storage.jdbc.dao.GenericJdbcDatabaseAccessor;
 
 import java.io.IOException;
 import java.util.Properties;
@@ -41,7 +44,10 @@ public RecordWriter getHiveRecordWriter(JobConf jc,
       boolean isCompressed,
       Properties tableProperties,
       Progressable progress) throws IOException {
-    throw new UnsupportedOperationException("Write operations are not allowed.");
+    TaskAttemptContext taskAttemptContext = ShimLoader.getHadoopShims().newTaskAttemptContext(jc, null);
+    org.apache.hadoop.mapreduce.RecordWriter recordWriter = new GenericJdbcDatabaseAccessor().getRecordWriter(taskAttemptContext);
+    // Wrapping DBRecordWriter in JdbcRecordWriter
+    return new JdbcRecordWriter((org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter) recordWriter);
   }
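getHiveRecordWriter now builds a TaskAttemptContext through the shim layer, asks GenericJdbcDatabaseAccessor for Hadoop's DBRecordWriter, and adapts it to Hive's RecordWriter interface via the wrapper defined next. For comparison, a sketch of how the same DBRecordWriter is normally wired up in plain MapReduce through DBOutputFormat; the connection settings here are assumptions for illustration, whereas Hive reads them from the hive.sql.* table properties instead:

```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;

public class DbOutputWiringSketch {
  public static Job configure() throws IOException {
    Configuration conf = new Configuration();
    // Plain MapReduce reads driver and URL from DBConfiguration; the Hive
    // path gets the same information from hive.sql.jdbc.driver/url.
    DBConfiguration.configureDB(conf,
        "org.apache.derby.jdbc.EmbeddedDriver",   // assumed driver
        "jdbc:derby:memory:demo;create=true");    // assumed URL
    Job job = Job.getInstance(conf);
    job.setOutputFormatClass(DBOutputFormat.class);
    // Table and columns; Hive derives these from hive.sql.table and the
    // serde's column list instead.
    DBOutputFormat.setOutput(job, "INSERT_TO_DERBY_TABLE", "a", "b", "c");
    return job;
  }
}
```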
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordWriter.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordWriter.java
new file mode 100644
index 0000000000..c27f1d5038
--- /dev/null
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordWriter.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.storage.jdbc;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * JdbcRecordWriter is a wrapper class that writes rows to the underlying database.
+ */
+public class JdbcRecordWriter implements RecordWriter {
+  private static final Log LOG = LogFactory.getLog(JdbcRecordWriter.class);
+
+  @SuppressWarnings("rawtypes")
+  private final DBRecordWriter dbRecordWriter;
+
+  @SuppressWarnings("rawtypes")
+  public JdbcRecordWriter(DBRecordWriter writer) {
+    this.dbRecordWriter = writer;
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public void write(Writable w) throws IOException {
+    dbRecordWriter.write((DBRecordWritable) w, null);
+  }
+
+  @Override
+  public void close(boolean abort) throws IOException {
+    if (abort) {
+      Connection conn = dbRecordWriter.getConnection();
+      try {
+        conn.rollback();
+      } catch (SQLException ex) {
+        LOG.warn(StringUtils.stringifyException(ex));
+      } finally {
+        try {
+          conn.close();
+        } catch (SQLException ex) {
+          // preserve the SQLException as the cause instead of only its message
+          throw new IOException(ex);
+        }
+      }
+    } else {
+      dbRecordWriter.close(null);
+    }
+  }
+
+}
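The close(boolean abort) logic above follows the standard JDBC transaction contract: on abort, roll back and close the connection; otherwise let DBRecordWriter.close() flush the batched statements and commit. The same contract in raw JDBC, as a minimal sketch (class and method names are illustrative, not from the patch):

```java
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class AbortContractSketch {
  // Mirrors JdbcRecordWriter.close(boolean): commit on success, rollback on abort.
  static void finish(Connection conn, PreparedStatement ps, boolean abort)
      throws SQLException {
    try {
      if (abort) {
        conn.rollback();     // discard everything written by this task
      } else {
        ps.executeBatch();   // Hadoop's DBRecordWriter.close() does this,
        conn.commit();       // then commits the transaction
      }
    } finally {
      conn.close();
    }
  }
}
```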
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
index 0a1bf2de01..074fddd697
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
@@ -24,9 +24,13 @@
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -61,9 +65,11 @@
   private static final Logger LOGGER = LoggerFactory.getLogger(JdbcSerDe.class);
 
   private String[] hiveColumnNames;
+  private int numColumns;
   private PrimitiveTypeInfo[] hiveColumnTypes;
   private ObjectInspector inspector;
   private List<Object> row;
+  private DBRecordWritable dbRecordWritable;
 
 
   /*
@@ -113,6 +119,9 @@ public void initialize(Configuration conf, Properties properties) throws SerDeEx
       throw new SerDeException("Received an empty Hive column type definition");
     }
 
+    numColumns = hiveColumnNames.length;
+    dbRecordWritable = new DBRecordWritable(numColumns);
+
     // Populate column types and inspector
     hiveColumnTypes = new PrimitiveTypeInfo[hiveColumnTypesList.size()];
    List<ObjectInspector> fieldInspectors = new ArrayList<>(hiveColumnNames.length);
@@ -142,6 +151,67 @@
     }
   }
 
+  @Override
+  public DBRecordWritable serialize(Object row, ObjectInspector inspector) throws SerDeException {
+    LOGGER.trace("Serializing from SerDe");
+    if ((row == null) || (hiveColumnTypes == null)) {
+      throw new SerDeException("JDBC SerDe hasn't been initialized properly");
+    }
+
+    if (((Object[]) row).length != numColumns) {
+      throw new SerDeException(String.format("Required %d columns, received %d.", numColumns, ((Object[]) row).length));
+    }
+
+    dbRecordWritable.clear();
+    for (int i = 0; i < numColumns; i++) {
+      Object rowData = ((Object[]) row)[i];
+      switch (hiveColumnTypes[i].getPrimitiveCategory()) {
+      case INT:
+        rowData = Integer.valueOf(rowData.toString());
+        break;
+      case SHORT:
+        rowData = Short.valueOf(rowData.toString());
+        break;
+      case BYTE:
+        rowData = Byte.valueOf(rowData.toString());
+        break;
+      case LONG:
+        rowData = Long.valueOf(rowData.toString());
+        break;
+      case FLOAT:
+        rowData = Float.valueOf(rowData.toString());
+        break;
+      case DOUBLE:
+        rowData = Double.valueOf(rowData.toString());
+        break;
+      case DECIMAL:
+        // bigDecimalValue() keeps full precision; going through
+        // unscaledValue().longValue() silently overflows for wide decimals
+        rowData = ((HiveDecimalWritable) rowData).getHiveDecimal().bigDecimalValue();
+        break;
+      case BOOLEAN:
+        rowData = Boolean.valueOf(rowData.toString());
+        break;
+      case CHAR:
+      case VARCHAR:
+      case STRING:
+        rowData = rowData.toString();
+        break;
+      case DATE:
+        rowData = java.sql.Date.valueOf(rowData.toString());
+        break;
+      case TIMESTAMP:
+        rowData = java.sql.Timestamp.valueOf(rowData.toString());
+        break;
+      default:
+        // pass the object through unchanged
+        break;
+      }
+      dbRecordWritable.set(i, rowData);
+    }
+    return dbRecordWritable;
+  }
+
   @Override
   public Object deserialize(Writable blob) throws SerDeException {
     LOGGER.trace("Deserializing from SerDe");
@@ -270,12 +340,6 @@ public ObjectInspector getObjectInspector() throws SerDeException {
   }
 
 
-  @Override
-  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
-    throw new UnsupportedOperationException("Writes are not allowed");
-  }
-
-
   @Override
   public SerDeStats getSerDeStats() {
     return null;
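serialize() reduces every Hive primitive to a plain Java object that PreparedStatement.setObject can bind. The DECIMAL branch is the one place a lossy shortcut is tempting, so it is worth pinning the conversion down in isolation. A small standalone check, assuming Hive's HiveDecimal API on the classpath (class name is illustrative):

```java
import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class DecimalConversionSketch {
  public static void main(String[] args) {
    HiveDecimalWritable w =
        new HiveDecimalWritable(HiveDecimal.create("1.7899"));
    // bigDecimalValue() carries the full unscaled value; reconstructing via
    // unscaledValue().longValue() would overflow once the unscaled value
    // no longer fits in a long (e.g. DECIMAL(38,10) values).
    BigDecimal jdbcValue = w.getHiveDecimal().bigDecimalValue();
    System.out.println(jdbcValue); // prints 1.7899
  }
}
```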
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java
index fab9829abd..e9f1631b1f
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java
@@ -41,4 +41,23 @@ protected String addLimitToQuery(String sql, int limit) {
     }
     return sql + " LIMIT " + limit;
   }
+
+  @Override
+  protected String constructQuery(String table, String[] columnNames) {
+    if (columnNames == null) {
+      throw new IllegalArgumentException("Column names may not be null");
+    }
+
+    StringBuilder query = new StringBuilder();
+    query.append("INSERT INTO ").append(table).append(" VALUES (");
+
+    for (int i = 0; i < columnNames.length; i++) {
+      query.append("?");
+      if (i != columnNames.length - 1) {
+        query.append(",");
+      }
+    }
+    query.append(")");
+    return query.toString();
+  }
 }
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
index e531ecc17a..f39a641a5c
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
@@ -55,6 +55,10 @@ public static DatabaseAccessor getAccessor(DatabaseType dbType) {
       accessor = new DB2DatabaseAccessor();
       break;
 
+    case DERBY:
+      accessor = new DerbyDatabaseAccessor();
+      break;
+
     default:
       accessor = new GenericJdbcDatabaseAccessor();
       break;
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DerbyDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DerbyDatabaseAccessor.java
new file mode 100644
index 0000000000..968f573a5e
--- /dev/null
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DerbyDatabaseAccessor.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.storage.jdbc.dao;
+
+/**
+ * Derby-specific data accessor
+ */
+public class DerbyDatabaseAccessor extends GenericJdbcDatabaseAccessor {
+  @Override
+  protected String constructQuery(String table, String[] columnNames) {
+    if (columnNames == null) {
+      throw new IllegalArgumentException("Column names may not be null");
+    }
+
+    StringBuilder query = new StringBuilder();
+    query.append("INSERT INTO ").append(table).append(" VALUES (");
+
+    for (int i = 0; i < columnNames.length; i++) {
+      query.append("?");
+      if (i != columnNames.length - 1) {
+        query.append(",");
+      }
+    }
+    query.append(")");
+    return query.toString();
+  }
+}
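Note that this Derby override, like the DB2 and Oracle ones, is byte-for-byte the generic implementation that follows, minus only the trailing semicolon, which these drivers reject inside a PreparedStatement. A quick same-package sketch of the resulting difference (the class name is illustrative; it sits in the dao package because constructQuery is protected):

```java
package org.apache.hive.storage.jdbc.dao;

// Illustrative check: only the generic accessor emits a trailing semicolon.
public class ConstructQuerySketch {
  public static void main(String[] args) {
    String[] cols = {"a", "b", "c"};
    System.out.println(new GenericJdbcDatabaseAccessor()
        .constructQuery("t1", cols)); // INSERT INTO t1 VALUES (?,?,?);
    System.out.println(new DerbyDatabaseAccessor()
        .constructQuery("t1", cols)); // INSERT INTO t1 VALUES (?,?,?)
  }
}
```

Since the three overrides are identical, an alternative design would drop the semicolon in the generic version and delete the overrides entirely; the split here keeps the generic behavior unchanged for other databases.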
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
index c2e7473e40..8dd9b93265
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -21,6 +21,9 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -31,6 +34,7 @@
 
 import javax.sql.DataSource;
 
+import java.io.IOException;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -172,6 +176,55 @@ public int getTotalNumberOfRecords(Configuration conf) throws HiveJdbcDatabaseAc
     }
   }
 
+  public RecordWriter getRecordWriter(TaskAttemptContext context)
+      throws IOException {
+    Configuration conf = context.getConfiguration();
+    String tableName = conf.get(JdbcStorageConfig.TABLE.getPropertyName());
+
+    if (tableName == null || tableName.isEmpty()) {
+      throw new IllegalArgumentException("Table name should be defined");
+    }
+    Connection conn = null;
+    PreparedStatement ps = null;
+    String[] columnNames = conf.get(serdeConstants.LIST_COLUMNS).split(",");
+
+    try {
+      initializeDatabaseConnection(conf);
+      conn = dbcpDataSource.getConnection();
+      ps = conn.prepareStatement(constructQuery(tableName, columnNames));
+      return new org.apache.hadoop.mapreduce.lib.db.DBOutputFormat()
+          .new DBRecordWriter(conn, ps);
+    } catch (Exception e) {
+      cleanupResources(conn, ps, null);
+      throw new IOException(e);
+    }
+  }
+
+  /**
+   * Constructs the query used as the prepared statement to insert data.
+   * Dialect-specific accessors override this to omit the trailing semicolon.
+   *
+   * @param table
+   *          the table to insert into
+   * @param columnNames
+   *          the columns to insert into
+   */
+  protected String constructQuery(String table, String[] columnNames) {
+    if (columnNames == null) {
+      throw new IllegalArgumentException("Column names may not be null");
+    }
+
+    StringBuilder query = new StringBuilder();
+    query.append("INSERT INTO ").append(table).append(" VALUES (");
+
+    for (int i = 0; i < columnNames.length; i++) {
+      query.append("?");
+      if (i != columnNames.length - 1) {
+        query.append(",");
+      }
+    }
+    query.append(");");
+    return query.toString();
+  }
 
 /**
  * Uses generic JDBC escape functions to add a limit and offset clause to a query string
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java
index 4a993fb5ac..636f6bd9ce
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java
@@ -49,4 +49,23 @@ protected String addLimitToQuery(String sql, int limit) {
     return "SELECT * FROM (" + sql + ") WHERE ROWNUM <= " + limit;
   }
 
+  @Override
+  protected String constructQuery(String table, String[] columnNames) {
+    if (columnNames == null) {
+      throw new IllegalArgumentException("Column names may not be null");
+    }
+
+    StringBuilder query = new StringBuilder();
+    query.append("INSERT INTO ").append(table).append(" VALUES (");
+
+    for (int i = 0; i < columnNames.length; i++) {
+      query.append("?");
+      if (i != columnNames.length - 1) {
+        query.append(",");
+      }
+    }
+    query.append(")");
+    return query.toString();
+  }
+
 }
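The q-file tests below exercise this path end to end against embedded Derby. For the 13-column table they create, the Derby accessor would be expected to prepare a statement of this shape (a hypothetical rendering of constructQuery's output, one placeholder per column a through m):

```sql
-- Assumed result of DerbyDatabaseAccessor.constructQuery("INSERT_TO_DERBY_TABLE", cols)
INSERT INTO INSERT_TO_DERBY_TABLE VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)
```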
diff --git a/ql/src/test/queries/clientpositive/jdbc_handler.q b/ql/src/test/queries/clientpositive/jdbc_handler.q
index d086735594..2c7e3fdc52
--- a/ql/src/test/queries/clientpositive/jdbc_handler.q
+++ b/ql/src/test/queries/clientpositive/jdbc_handler.q
@@ -91,3 +91,76 @@ from
 order by a,b;
 
 describe tables;
+
+-- Tests for inserting into a JDBC data source
+
+FROM src
+
+SELECT dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_insert_derby_as_external_table_db;create=true','','',
+'CREATE TABLE INSERT_TO_DERBY_TABLE (a BOOLEAN, b INTEGER, c BIGINT, d FLOAT, e DOUBLE, f DATE, g VARCHAR(27),
+ h VARCHAR(27), i CHAR(2), j TIMESTAMP, k DECIMAL(5,4), l SMALLINT, m SMALLINT)' )
+
+limit 1;
+
+CREATE EXTERNAL TABLE insert_to_ext_derby_table
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+ "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_insert_derby_as_external_table_db;create=true;collation=TERRITORY_BASED:PRIMARY",
+ "hive.sql.dbcp.username" = "APP",
+ "hive.sql.dbcp.password" = "mine",
+ "hive.sql.table" = "INSERT_TO_DERBY_TABLE",
+ "hive.sql.dbcp.maxActive" = "1"
+);
+
+CREATE TABLE test_insert_tbl
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ );
+
+INSERT INTO test_insert_tbl VALUES(true, 342, 8900, 9.63, 1099.9999, '2019-04-11', 'abcd', 'efgh', 'k', '2019-05-01 00:00:00', 1.8899, 1, 2);
+
+-- Inserting a single row of data
+
+INSERT INTO insert_to_ext_derby_table VALUES(true, 10, 100, 2.63, 999.9999, '2019-01-11', 'test', 'test1', 'z', '2019-01-01 00:00:00', 1.7899, 1, 2);
+
+INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl;
+select * from insert_to_ext_derby_table;
+
+INSERT INTO insert_to_ext_derby_table VALUES(false, 324, 53465, 2.6453, 599.9999, '2019-04-11', 'fgeg', 'asda', 'k', '2019-03-01 10:00:00', 1.7899, 1, 2);
+
+-- Inserting multiple rows of data
+INSERT INTO insert_to_ext_derby_table VALUES(false, 10, 100, 2.63, 999.9999, '2019-11-11', 'test', 'test1', 'a', '2019-01-01 00:00:00', 1.7899, 1, 2),
+ (true, 100, 1000, 2.632, 9999.99999, '2019-12-11', 'test_1', 'test1_1', 'b', '2019-02-01 01:00:01', 5.7899, 3, 4),
+ (false, 10, 999, 23.632, 99999.99999, '2019-09-11', 'test_2', 'test1_2', 'c', '2019-03-01 11:00:01', 9.7899, 5, 6);
+
+INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl;
+select * from insert_to_ext_derby_table;
diff --git a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
index 3c043f544f..567c524fd4
--- a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
+++ b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
@@ -360,3 +360,206 @@ db_id bigint from deserializer
 name string from deserializer
 type string from deserializer
 #### A masked pattern was here ####
+PREHOOK: query: FROM src
+#### A masked pattern was here ####
+'CREATE TABLE INSERT_TO_DERBY_TABLE (a BOOLEAN, b INTEGER, c BIGINT, d FLOAT, e DOUBLE, f DATE, g VARCHAR(27),
+ h VARCHAR(27), i CHAR(2), j TIMESTAMP, k DECIMAL(5,4), l SMALLINT, m SMALLINT)' )
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+#### A masked pattern was here ####
+'CREATE TABLE INSERT_TO_DERBY_TABLE (a BOOLEAN, b INTEGER, c BIGINT, d FLOAT, e DOUBLE, f DATE, g VARCHAR(27),
+ h VARCHAR(27), i CHAR(2), j TIMESTAMP, k DECIMAL(5,4), l SMALLINT, m SMALLINT)' )
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0
+PREHOOK: query: CREATE EXTERNAL TABLE insert_to_ext_derby_table
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+ "hive.sql.dbcp.username" = "APP",
+ "hive.sql.dbcp.password" = "mine",
+ "hive.sql.table" = "INSERT_TO_DERBY_TABLE",
+ "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: CREATE EXTERNAL TABLE insert_to_ext_derby_table
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+ "hive.sql.dbcp.username" = "APP",
+ "hive.sql.dbcp.password" = "mine",
+ "hive.sql.table" = "INSERT_TO_DERBY_TABLE",
+ "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: CREATE TABLE test_insert_tbl
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_insert_tbl
+POSTHOOK: query: CREATE TABLE test_insert_tbl
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_insert_tbl
+PREHOOK: query: INSERT INTO test_insert_tbl VALUES(true, 342, 8900, 9.63, 1099.9999, '2019-04-11', 'abcd', 'efgh', 'k', '2019-05-01 00:00:00', 1.8899, 1, 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_insert_tbl
+POSTHOOK: query: INSERT INTO test_insert_tbl VALUES(true, 342, 8900, 9.63, 1099.9999, '2019-04-11', 'abcd', 'efgh', 'k', '2019-05-01 00:00:00', 1.8899, 1, 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_insert_tbl
+POSTHOOK: Lineage: test_insert_tbl.a SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.b SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.c SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.d SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.e SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.f SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.g SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.h SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.i SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.j SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.k SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.l SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.m SCRIPT []
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(true, 10, 100, 2.63, 999.9999, '2019-01-11', 'test', 'test1', 'z', '2019-01-01 00:00:00', 1.7899, 1, 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(true, 10, 100, 2.63, 999.9999, '2019-01-11', 'test', 'test1', 'z', '2019-01-01 00:00:00', 1.7899, 1, 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_insert_tbl
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_insert_tbl
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: select * from insert_to_ext_derby_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_to_ext_derby_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from insert_to_ext_derby_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_to_ext_derby_table
+#### A masked pattern was here ####
+true 10 100 2.63 999.9999 2019-01-11 test test1 z 2019-01-01 00:00:00 1.7899 1 2
+true 342 8900 9.63 1099.9999 2019-04-11 abcd efgh k 2019-05-01 00:00:00 1.8899 1 2
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(false, 324, 53465, 2.6453, 599.9999, '2019-04-11', 'fgeg', 'asda', 'k', '2019-03-01 10:00:00', 1.7899, 1, 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(false, 324, 53465, 2.6453, 599.9999, '2019-04-11', 'fgeg', 'asda', 'k', '2019-03-01 10:00:00', 1.7899, 1, 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(false, 10, 100, 2.63, 999.9999, '2019-11-11', 'test', 'test1', 'a', '2019-01-01 00:00:00', 1.7899, 1, 2),
+ (true, 100, 1000, 2.632, 9999.99999, '2019-12-11', 'test_1', 'test1_1', 'b', '2019-02-01 01:00:01', 5.7899, 3, 4),
+ (false, 10, 999, 23.632, 99999.99999, '2019-09-11', 'test_2', 'test1_2', 'c', '2019-03-01 11:00:01', 9.7899, 5, 6)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(false, 10, 100, 2.63, 999.9999, '2019-11-11', 'test', 'test1', 'a', '2019-01-01 00:00:00', 1.7899, 1, 2),
+ (true, 100, 1000, 2.632, 9999.99999, '2019-12-11', 'test_1', 'test1_1', 'b', '2019-02-01 01:00:01', 5.7899, 3, 4),
+ (false, 10, 999, 23.632, 99999.99999, '2019-09-11', 'test_2', 'test1_2', 'c', '2019-03-01 11:00:01', 9.7899, 5, 6)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_insert_tbl
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_insert_tbl
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: select * from insert_to_ext_derby_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_to_ext_derby_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from insert_to_ext_derby_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_to_ext_derby_table
+#### A masked pattern was here ####
+true 10 100 2.63 999.9999 2019-01-11 test test1 z 2019-01-01 00:00:00 1.7899 1 2
+true 342 8900 9.63 1099.9999 2019-04-11 abcd efgh k 2019-05-01 00:00:00 1.8899 1 2
+false 324 53465 2.6453 599.9999 2019-04-11 fgeg asda k 2019-03-01 10:00:00 1.7899 1 2
+false 10 100 2.63 999.9999 2019-11-11 test test1 a 2019-01-01 00:00:00 1.7899 1 2
+true 100 1000 2.632 9999.99999 2019-12-11 test_1 test1_1 b 2019-02-01 01:00:01 5.7899 3 4
+false 10 999 23.632 99999.99999 2019-09-11 test_2 test1_2 c 2019-03-01 11:00:01 9.7899 5 6
+true 342 8900 9.63 1099.9999 2019-04-11 abcd efgh k 2019-05-01 00:00:00 1.8899 1 2