diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index a9ab90bca8..3733e3d02f 100644
--- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -2101,7 +2101,7 @@ public static void dropDB(IMetaStoreClient client, String databaseName) {
 
   ///////// -------- UTILS ------- /////////
   // returns Path of the partition created (if any) else Path of table
-  public static Path createDbAndTable(IDriver driver, String databaseName,
+  private static Path createDbAndTable(IDriver driver, String databaseName,
                                       String tableName, List<String> partVals,
                                       String[] colNames, String[] colTypes,
                                       String[] bucketCols,
@@ -2147,7 +2147,7 @@ private static Path getPartitionPath(IDriver driver, String tableName, String pa
   private static String getTableColumnsStr(String[] colNames, String[] colTypes) {
     StringBuilder sb = new StringBuilder();
     for (int i=0; i < colNames.length; ++i) {
-      sb.append(colNames[i] + " " + colTypes[i]);
+      sb.append(colNames[i]).append(" ").append(colTypes[i]);
       if (i
 partVals) {
     StringBuilder sb = new StringBuilder();
     for (int i=0; i < partVals.size(); ++i) {
-      sb.append(partNames[i] + " = '" + partVals.get(i) + "'");
+      sb.append(partNames[i]).append(" = '").append(partVals.get(i)).append("'");
       if(i < partVals.size()-1) {
         sb.append(",");
       }
@@ -2217,7 +2217,7 @@ private static boolean runDDL(IDriver driver, String sql) throws QueryFailedExce
   }
 
-  public static ArrayList<String> queryTable(IDriver driver, String query) throws IOException {
+  private static ArrayList<String> queryTable(IDriver driver, String query) throws IOException {
     CommandProcessorResponse cpr = driver.run(query);
     if(cpr.getResponseCode() != 0) {
       throw new RuntimeException(query + " failed: " + cpr);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 08d26dc14e..067bb390df 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -3507,29 +3507,33 @@ private void handleTransactionalTable(Table tab, AddPartitionDesc addPartitionDe
     if(!AcidUtils.isTransactionalTable(tab)) {
       return;
     }
-    Long writeId;
-    int stmtId;
-    try {
-      writeId = SessionState.get().getTxnMgr().getTableWriteId(tab.getDbName(),
-              tab.getTableName());
-    } catch (LockException ex) {
-      throw new SemanticException("Failed to allocate the write id", ex);
-    }
-    stmtId = SessionState.get().getTxnMgr().getStmtIdAndIncrement();
+    Long writeId = null;
+    int stmtId = 0;
     for (int index = 0; index < addPartitionDesc.getPartitionCount(); index++) {
       OnePartitionDesc desc = addPartitionDesc.getPartition(index);
       if (desc.getLocation() != null) {
         if(addPartitionDesc.isIfNotExists()) {
-          //Don't add
+          //Don't add partition data if it already exists
           Partition oldPart = getPartition(tab, desc.getPartSpec(), false);
           if(oldPart != null) {
             continue;
           }
         }
+        if(writeId == null) {
+          //so that we allocate a writeId only if actually adding data
+          // (vs. adding a partition w/o data)
+          try {
+            writeId = SessionState.get().getTxnMgr().getTableWriteId(tab.getDbName(),
+                tab.getTableName());
+          } catch (LockException ex) {
+            throw new SemanticException("Failed to allocate the write id", ex);
+          }
+          stmtId = SessionState.get().getTxnMgr().getStmtIdAndIncrement();
+        }
         LoadTableDesc loadTableWork = new LoadTableDesc(new Path(desc.getLocation()),
             Utilities.getTableDesc(tab), desc.getPartSpec(),
-            LoadTableDesc.LoadFileType.KEEP_EXISTING,//not relevant - creating new partition
+            LoadTableDesc.LoadFileType.KEEP_EXISTING, //not relevant - creating new partition
             writeId);
         loadTableWork.setStmtId(stmtId);
         loadTableWork.setInheritTableSpecs(true);
@@ -3538,7 +3542,8 @@ private void handleTransactionalTable(Table tab, AddPartitionDesc addPartitionDe
               Warehouse.makePartPath(desc.getPartSpec())).toString());
         } catch (MetaException ex) {
-          throw new SemanticException("Could not determine partition path due to: " + ex.getMessage(), ex);
+          throw new SemanticException("Could not determine partition path due to: "
+              + ex.getMessage(), ex);
         }
         Task moveTask = TaskFactory.get(
             new MoveWork(getInputs(), getOutputs(), loadTableWork, null,