diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java index 46506a5..119e5aa 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java @@ -27,7 +27,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.File; import java.io.IOException; /** @@ -181,5 +180,17 @@ public void grant() throws Exception { Assert.assertEquals(0, rsp.getResponseCode()); } + @Test + public void describeNonpartitionedTable() throws Exception { + CommandProcessorResponse rsp = driver.run("create table alter1(a int, b int)"); + Assert.assertEquals(0, rsp.getResponseCode()); + rsp = driver.run("describe extended alter1"); + Assert.assertEquals(0, rsp.getResponseCode()); + rsp = driver.run("alter table alter1 set serdeproperties('s1'='9')"); + Assert.assertEquals(0, rsp.getResponseCode()); + rsp = driver.run("describe extended alter1"); + Assert.assertEquals(0, rsp.getResponseCode()); + } + } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 8bcf860..a4dc35b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -18,36 +18,6 @@ package org.apache.hadoop.hive.ql.exec; -import static org.apache.commons.lang.StringUtils.join; -import static org.apache.hadoop.util.StringUtils.stringifyException; - -import java.io.BufferedWriter; -import java.io.DataOutputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.io.Serializable; -import java.io.Writer; -import java.net.URI; -import java.net.URISyntaxException; -import java.sql.SQLException; -import 
java.util.AbstractList; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeMap; -import java.util.TreeSet; - import com.google.common.collect.Iterables; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; @@ -216,6 +186,36 @@ import org.apache.hive.common.util.ReflectionUtil; import org.stringtemplate.v4.ST; +import java.io.BufferedWriter; +import java.io.DataOutputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Serializable; +import java.io.Writer; +import java.net.URI; +import java.net.URISyntaxException; +import java.sql.SQLException; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeMap; +import java.util.TreeSet; + +import static org.apache.commons.lang.StringUtils.join; +import static org.apache.hadoop.util.StringUtils.stringifyException; + /** * DDLTask implementation. * @@ -3391,7 +3391,9 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException { } } - Table oldTbl = tbl.copy(); + // Don't change the table object returned by the metastore, as we'll mess with its caches. + Table oldTbl = tbl; + tbl = oldTbl.copy(); if (allPartitions != null) { // Alter all partitions for (Partition part : allPartitions) {