diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 348cc30..5d3e9c2 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -1879,7 +1879,7 @@ private Partition append_partition_common(RawStore ms, String dbName, String tab firePreEvent(new PreAddPartitionEvent(tbl, part, this)); - part.setSd(tbl.getSd()); + part.setSd(tbl.getSd().deepCopy()); partLocation = new Path(tbl.getSd().getLocation(), Warehouse .makePartName(tbl.getPartitionKeys(), part_vals)); part.getSd().setLocation(partLocation.toString()); diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java index 7752cac..deebb9b 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java @@ -52,6 +52,7 @@ import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.Type; import org.apache.hadoop.hive.metastore.api.UnknownDBException; diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java index b9e7fd1..d30e2a0 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java @@ -640,12 +640,14 @@ static StorageDescriptor deserializeStorageDescriptor(byte[] serialized) } static void assembleStorageDescriptor(StorageDescriptor sd, 
StorageDescriptorParts parts) { - sd.setLocation(parts.location); - sd.setParameters(parts.parameters); + SharedStorageDescriptor ssd = new SharedStorageDescriptor(); + ssd.setLocation(parts.location); + ssd.setParameters(parts.parameters); + ssd.setShared(sd); if (parts.containingPartition != null) { - parts.containingPartition.setSd(sd); + parts.containingPartition.setSd(ssd); } else if (parts.containingTable != null) { - parts.containingTable.setSd(sd); + parts.containingTable.setSd(ssd); } else { throw new RuntimeException("Need either a partition or a table"); } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java index 12fea80..f5e9168 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java @@ -26,7 +26,8 @@ import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -38,7 +39,11 @@ */ public class SharedStorageDescriptor extends StorageDescriptor { static final private Log LOG = LogFactory.getLog(SharedStorageDescriptor.class.getName()); - StorageDescriptor shared; + private StorageDescriptor shared; + private boolean copied = false; + private CopyOnWriteColList colList = null; + private CopyOnWriteOrderList orderList = null; + private CopyOnWriteBucketList bucketList = null; SharedStorageDescriptor() { } @@ -49,13 +54,20 @@ public SharedStorageDescriptor(SharedStorageDescriptor that) { this.shared = that.shared; } - void readShared(byte[] hash) throws IOException { - shared = HBaseReadWrite.getInstance().getStorageDescriptor(hash); + @Override + public StorageDescriptor 
deepCopy() { + return new SharedStorageDescriptor(this); + } + + @Override + public boolean isSetCols() { + return shared.isSetCols(); } @Override public List getCols() { - return shared.getCols(); + return copied ? shared.getCols() : ( + shared.getCols() == null ? null : copyCols(shared.getCols())); } @Override @@ -69,33 +81,143 @@ public int getColsSize() { } @Override + public void setCols(List cols) { + copyOnWrite(); + shared.setCols(cols); + } + + @Override + public void addToCols(FieldSchema fs) { + copyOnWrite(); + shared.addToCols(fs); + } + + @Override + public void unsetCols() { + copyOnWrite(); + shared.unsetCols(); + } + + @Override + public boolean isSetInputFormat() { + return shared.isSetInputFormat(); + } + + @Override public String getInputFormat() { return shared.getInputFormat(); } @Override + public void setInputFormat(String inputFormat) { + copyOnWrite(); + shared.setInputFormat(inputFormat); + } + + @Override + public void unsetInputFormat() { + copyOnWrite(); + shared.unsetInputFormat(); + } + + @Override + public boolean isSetOutputFormat() { + return shared.isSetOutputFormat(); + } + + @Override public String getOutputFormat() { return shared.getOutputFormat(); } @Override + public void setOutputFormat(String outputFormat) { + copyOnWrite(); + shared.setOutputFormat(outputFormat); + } + + @Override + public void unsetOutputFormat() { + copyOnWrite(); + shared.unsetOutputFormat(); + } + + @Override + public boolean isSetCompressed() { + return shared.isSetCompressed(); + } + + @Override public boolean isCompressed() { return shared.isCompressed(); } @Override + public void setCompressed(boolean isCompressed) { + copyOnWrite(); + shared.setCompressed(isCompressed); + } + + @Override + public void unsetCompressed() { + copyOnWrite(); + shared.unsetCompressed(); + } + + @Override + public boolean isSetNumBuckets() { + return shared.isSetNumBuckets(); + } + + @Override public int getNumBuckets() { return shared.getNumBuckets(); } @Override + 
public void setNumBuckets(int numBuckets) { + copyOnWrite(); + shared.setNumBuckets(numBuckets); + } + + @Override + public void unsetNumBuckets() { + copyOnWrite(); + shared.unsetNumBuckets(); + } + + @Override + public boolean isSetSerdeInfo() { + return shared.isSetSerdeInfo(); + } + + @Override public SerDeInfo getSerdeInfo() { - return shared.getSerdeInfo(); + return copied ? shared.getSerdeInfo() : ( + shared.getSerdeInfo() == null ? null : new SerDeInfoWrapper(shared.getSerdeInfo())); + } + + @Override + public void setSerdeInfo(SerDeInfo serdeInfo) { + copyOnWrite(); + shared.setSerdeInfo(serdeInfo); + } + + @Override + public void unsetSerdeInfo() { + copyOnWrite(); + shared.unsetSerdeInfo(); + } + + @Override + public boolean isSetBucketCols() { + return shared.isSetBucketCols(); } @Override public List getBucketCols() { - return shared.getBucketCols(); + return copied ? shared.getBucketCols() : ( + shared.getBucketCols() == null ? null : copyBucketCols(shared.getBucketCols())); } @Override @@ -109,8 +231,32 @@ public int getBucketColsSize() { } @Override + public void setBucketCols(List bucketCols) { + copyOnWrite(); + shared.setBucketCols(bucketCols); + } + + @Override + public void addToBucketCols(String bucketCol) { + copyOnWrite(); + shared.addToBucketCols(bucketCol); + } + + @Override + public void unsetBucketCols() { + copyOnWrite(); + shared.unsetBucketCols(); + } + + @Override + public boolean isSetSortCols() { + return shared.isSetSortCols(); + } + + @Override public List getSortCols() { - return shared.getSortCols(); + return copied ? shared.getSortCols() : ( + shared.getSortCols() == null ? 
null : copySort(shared.getSortCols())); } @Override @@ -124,12 +270,508 @@ public int getSortColsSize() { } @Override + public void setSortCols(List sortCols) { + copyOnWrite(); + shared.setSortCols(sortCols); + } + + @Override + public void addToSortCols(Order sortCol) { + copyOnWrite(); + shared.addToSortCols(sortCol); + } + + @Override + public void unsetSortCols() { + copyOnWrite(); + shared.unsetSortCols(); + } + + @Override + public boolean isSetSkewedInfo() { + return shared.isSetSkewedInfo(); + } + + @Override public SkewedInfo getSkewedInfo() { - return shared.getSkewedInfo(); + return copied ? shared.getSkewedInfo() : ( + shared.getSkewedInfo() == null ? null : new SkewWrapper(shared.getSkewedInfo())); + } + + @Override + public void setSkewedInfo(SkewedInfo skewedInfo) { + copyOnWrite(); + shared.setSkewedInfo(skewedInfo); + } + + @Override + public void unsetSkewedInfo() { + copyOnWrite(); + shared.unsetSkewedInfo(); + } + + @Override + public boolean isSetStoredAsSubDirectories() { + return shared.isSetStoredAsSubDirectories(); } @Override public boolean isStoredAsSubDirectories() { return shared.isStoredAsSubDirectories(); } + + @Override + public void setStoredAsSubDirectories(boolean sasd) { + copyOnWrite(); + shared.setStoredAsSubDirectories(sasd); + } + + @Override + public void unsetStoredAsSubDirectories() { + copyOnWrite(); + shared.unsetStoredAsSubDirectories(); + } + + void setShared(StorageDescriptor sd) { + shared = sd; + } + + StorageDescriptor getShared() { + return shared; + } + + private void copyOnWrite() { + if (!copied) { + shared = new StorageDescriptor(shared); + copied = true; + } + } + + private class SerDeInfoWrapper extends SerDeInfo { + + SerDeInfoWrapper(SerDeInfo serde) { + super(serde); + } + + @Override + public void setName(String name) { + copyOnWrite(); + shared.getSerdeInfo().setName(name); + } + + @Override + public void unsetName() { + copyOnWrite(); + shared.getSerdeInfo().unsetName(); + } + + @Override + public 
void setSerializationLib(String lib) { + copyOnWrite(); + shared.getSerdeInfo().setSerializationLib(lib); + } + + @Override + public void unsetSerializationLib() { + copyOnWrite(); + shared.getSerdeInfo().unsetSerializationLib(); + } + + @Override + public void setParameters(Map parameters) { + copyOnWrite(); + shared.getSerdeInfo().setParameters(parameters); + } + + @Override + public void unsetParameters() { + copyOnWrite(); + shared.getSerdeInfo().unsetParameters(); + } + + @Override + public void putToParameters(String key, String value) { + copyOnWrite(); + shared.getSerdeInfo().putToParameters(key, value); + } + } + + private class SkewWrapper extends SkewedInfo { + SkewWrapper(SkewedInfo skew) { + super(skew); + } + + @Override + public void setSkewedColNames(List skewedColNames) { + copyOnWrite(); + shared.getSkewedInfo().setSkewedColNames(skewedColNames); + } + + @Override + public void unsetSkewedColNames() { + copyOnWrite(); + shared.getSkewedInfo().unsetSkewedColNames(); + } + + @Override + public void addToSkewedColNames(String skewCol) { + copyOnWrite(); + shared.getSkewedInfo().addToSkewedColNames(skewCol); + } + + @Override + public void setSkewedColValues(List> skewedColValues) { + copyOnWrite(); + shared.getSkewedInfo().setSkewedColValues(skewedColValues); + } + + @Override + public void unsetSkewedColValues() { + copyOnWrite(); + shared.getSkewedInfo().unsetSkewedColValues(); + } + + @Override + public void addToSkewedColValues(List skewedColValue) { + copyOnWrite(); + shared.getSkewedInfo().addToSkewedColValues(skewedColValue); + } + + @Override + public void setSkewedColValueLocationMaps(Map, String> maps) { + copyOnWrite(); + shared.getSkewedInfo().setSkewedColValueLocationMaps(maps); + } + + @Override + public void unsetSkewedColValueLocationMaps() { + copyOnWrite(); + shared.getSkewedInfo().unsetSkewedColValueLocationMaps(); + } + + @Override + public void putToSkewedColValueLocationMaps(List key, String value) { + copyOnWrite(); + 
shared.getSkewedInfo().putToSkewedColValueLocationMaps(key, value); + } + } + + private CopyOnWriteOrderList copySort(List sort) { + if (orderList == null) { + orderList = new CopyOnWriteOrderList(sort.size()); + for (int i = 0; i < sort.size(); i++) { + orderList.secretAdd(new OrderWrapper(i, sort.get(i))); + } + } + return orderList; + } + + private class CopyOnWriteOrderList extends ArrayList { + + CopyOnWriteOrderList(int size) { + super(size); + } + + private void secretAdd(OrderWrapper order) { + super.add(order); + } + + @Override + public boolean add(Order t) { + copyOnWrite(); + return shared.getSortCols().add(t); + } + + @Override + public boolean remove(Object o) { + copyOnWrite(); + return shared.getSortCols().remove(o); + } + + @Override + public boolean addAll(Collection c) { + copyOnWrite(); + return shared.getSortCols().addAll(c); + } + + @Override + public boolean addAll(int index, Collection c) { + copyOnWrite(); + return shared.getSortCols().addAll(index, c); + } + + @Override + public boolean removeAll(Collection c) { + copyOnWrite(); + return shared.getSortCols().removeAll(c); + } + + @Override + public boolean retainAll(Collection c) { + copyOnWrite(); + return shared.getSortCols().retainAll(c); + } + + @Override + public void clear() { + copyOnWrite(); + shared.getSortCols().clear(); + } + + @Override + public Order set(int index, Order element) { + copyOnWrite(); + return shared.getSortCols().set(index, element); + } + + @Override + public void add(int index, Order element) { + copyOnWrite(); + shared.getSortCols().add(index, element); + } + + @Override + public Order remove(int index) { + copyOnWrite(); + return shared.getSortCols().remove(index); + } + } + + private class OrderWrapper extends Order { + final private int pos; + + OrderWrapper(int pos, Order order) { + super(order); + this.pos = pos; + } + + @Override + public void setCol(String col) { + copyOnWrite(); + shared.getSortCols().get(pos).setCol(col); + } + + @Override + public void 
unsetCol() { + copyOnWrite(); + shared.getSortCols().get(pos).unsetCol(); + } + + @Override + public void setOrder(int order) { + copyOnWrite(); + shared.getSortCols().get(pos).setOrder(order); + } + + @Override + public void unsetOrder() { + copyOnWrite(); + shared.getSortCols().get(pos).unsetOrder(); + } + } + + private CopyOnWriteColList copyCols(List cols) { + if (colList == null) { + colList = new CopyOnWriteColList(cols.size()); + for (int i = 0; i < cols.size(); i++) { + colList.secretAdd(new FieldSchemaWrapper(i, cols.get(i))); + } + } + return colList; + } + + private class CopyOnWriteColList extends ArrayList { + + CopyOnWriteColList(int size) { + super(size); + } + + private void secretAdd(FieldSchemaWrapper col) { + super.add(col); + } + + @Override + public boolean add(FieldSchema t) { + copyOnWrite(); + return shared.getCols().add(t); + } + + @Override + public boolean remove(Object o) { + copyOnWrite(); + return shared.getCols().remove(o); + } + + @Override + public boolean addAll(Collection c) { + copyOnWrite(); + return shared.getCols().addAll(c); + } + + @Override + public boolean addAll(int index, Collection c) { + copyOnWrite(); + return shared.getCols().addAll(index, c); + } + + @Override + public boolean removeAll(Collection c) { + copyOnWrite(); + return shared.getCols().removeAll(c); + } + + @Override + public boolean retainAll(Collection c) { + copyOnWrite(); + return shared.getCols().retainAll(c); + } + + @Override + public void clear() { + copyOnWrite(); + shared.getCols().clear(); + } + + @Override + public FieldSchema set(int index, FieldSchema element) { + copyOnWrite(); + return shared.getCols().set(index, element); + } + + @Override + public void add(int index, FieldSchema element) { + copyOnWrite(); + shared.getCols().add(index, element); + } + + @Override + public FieldSchema remove(int index) { + copyOnWrite(); + return shared.getCols().remove(index); + } + } + + private class FieldSchemaWrapper extends FieldSchema { + final private int 
pos; + + FieldSchemaWrapper(int pos, FieldSchema col) { + super(col); + this.pos = pos; + } + + @Override + public void setName(String name) { + copyOnWrite(); + shared.getCols().get(pos).setName(name); + } + + @Override + public void unsetName() { + copyOnWrite(); + shared.getCols().get(pos).unsetName(); + } + + @Override + public void setType(String type) { + copyOnWrite(); + shared.getCols().get(pos).setType(type); + } + + @Override + public void unsetType() { + copyOnWrite(); + shared.getCols().get(pos).unsetType(); + } + + @Override + public void setComment(String comment) { + copyOnWrite(); + shared.getCols().get(pos).setComment(comment); + } + + @Override + public void unsetComment() { + copyOnWrite(); + shared.getCols().get(pos).unsetComment(); + } + } + + private CopyOnWriteBucketList copyBucketCols(List cols) { + if (bucketList == null) { + bucketList = new CopyOnWriteBucketList(cols); + } + return bucketList; + } + + private class CopyOnWriteBucketList extends ArrayList { + + CopyOnWriteBucketList(Collection c) { + super(c); + } + + private void secretAdd(String col) { + super.add(col); + } + + @Override + public boolean add(String t) { + copyOnWrite(); + return shared.getBucketCols().add(t); + } + + @Override + public boolean remove(Object o) { + copyOnWrite(); + return shared.getBucketCols().remove(o); + } + + @Override + public boolean addAll(Collection c) { + copyOnWrite(); + return shared.getBucketCols().addAll(c); + } + + @Override + public boolean addAll(int index, Collection c) { + copyOnWrite(); + return shared.getBucketCols().addAll(index, c); + } + + @Override + public boolean removeAll(Collection c) { + copyOnWrite(); + return shared.getBucketCols().removeAll(c); + } + + @Override + public boolean retainAll(Collection c) { + copyOnWrite(); + return shared.getBucketCols().retainAll(c); + } + + @Override + public void clear() { + copyOnWrite(); + shared.getBucketCols().clear(); + } + + @Override + public String set(int index, String element) { + 
copyOnWrite(); + return shared.getBucketCols().set(index, element); + } + + @Override + public void add(int index, String element) { + copyOnWrite(); + shared.getBucketCols().add(index, element); + } + + @Override + public String remove(int index) { + copyOnWrite(); + return shared.getBucketCols().remove(index); + } + } + } diff --git metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java new file mode 100644 index 0000000..c9d1eef --- /dev/null +++ metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java @@ -0,0 +1,127 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.hadoop.hive.metastore.hbase; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.metastore.api.Order; +import org.apache.hadoop.hive.metastore.api.SerDeInfo; +import org.apache.hadoop.hive.metastore.api.SkewedInfo; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.junit.Assert; +import org.junit.Test; + +import java.util.List; + + +/** + * + */ +public class TestSharedStorageDescriptor { + private static final Log LOG = LogFactory.getLog(TestHBaseStore.class.getName()); + + + @Test + public void location() { + StorageDescriptor sd = new StorageDescriptor(); + SharedStorageDescriptor ssd = new SharedStorageDescriptor(); + ssd.setLocation("here"); + ssd.setShared(sd); + ssd.setLocation("there"); + Assert.assertTrue(sd == ssd.getShared()); + } + + @Test + public void changeOnInputFormat() { + StorageDescriptor sd = new StorageDescriptor(); + sd.setInputFormat("input"); + SharedStorageDescriptor ssd = new SharedStorageDescriptor(); + ssd.setShared(sd); + Assert.assertEquals("input", ssd.getInputFormat()); + ssd.setInputFormat("different"); + Assert.assertFalse(sd == ssd.getShared()); + Assert.assertEquals("input", sd.getInputFormat()); + Assert.assertEquals("different", ssd.getInputFormat()); + Assert.assertEquals("input", sd.getInputFormat()); + } + + @Test + public void changeOnSerde() { + StorageDescriptor sd = new StorageDescriptor(); + SerDeInfo serde = new SerDeInfo(); + serde.setName("serde"); + sd.setSerdeInfo(serde); + SharedStorageDescriptor ssd = new SharedStorageDescriptor(); + ssd.setShared(sd); + Assert.assertEquals("serde", ssd.getSerdeInfo().getName()); + ssd.getSerdeInfo().setName("different"); + Assert.assertFalse(sd == ssd.getShared()); + Assert.assertEquals("serde", serde.getName()); + Assert.assertEquals("different", ssd.getSerdeInfo().getName()); + Assert.assertEquals("serde", sd.getSerdeInfo().getName()); + } + + @Test + public 
void multipleChangesDontCauseMultipleCopies() { + StorageDescriptor sd = new StorageDescriptor(); + sd.setInputFormat("input"); + sd.setOutputFormat("output"); + SharedStorageDescriptor ssd = new SharedStorageDescriptor(); + ssd.setShared(sd); + Assert.assertEquals("input", ssd.getInputFormat()); + ssd.setInputFormat("different"); + Assert.assertFalse(sd == ssd.getShared()); + Assert.assertEquals("input", sd.getInputFormat()); + Assert.assertEquals("different", ssd.getInputFormat()); + StorageDescriptor keep = ssd.getShared(); + ssd.setOutputFormat("different_output"); + Assert.assertEquals("different", ssd.getInputFormat()); + Assert.assertEquals("different_output", ssd.getOutputFormat()); + Assert.assertEquals("output", sd.getOutputFormat()); + Assert.assertTrue(keep == ssd.getShared()); + } + + @Test + public void changeOrder() { + StorageDescriptor sd = new StorageDescriptor(); + sd.addToSortCols(new Order("fred", 1)); + SharedStorageDescriptor ssd = new SharedStorageDescriptor(); + ssd.setShared(sd); + Assert.assertEquals(1, ssd.getSortCols().get(0).getOrder()); + ssd.getSortCols().get(0).setOrder(2); + Assert.assertFalse(sd == ssd.getShared()); + Assert.assertEquals(2, ssd.getSortCols().get(0).getOrder()); + Assert.assertEquals(1, sd.getSortCols().get(0).getOrder()); + } + + @Test + public void changeOrderList() { + StorageDescriptor sd = new StorageDescriptor(); + sd.addToSortCols(new Order("fred", 1)); + SharedStorageDescriptor ssd = new SharedStorageDescriptor(); + ssd.setShared(sd); + Assert.assertEquals(1, ssd.getSortCols().get(0).getOrder()); + List list = ssd.getSortCols(); + list.add(new Order("bob", 2)); + Assert.assertFalse(sd == ssd.getShared()); + Assert.assertEquals(2, ssd.getSortColsSize()); + Assert.assertEquals(1, sd.getSortColsSize()); + } + +} diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java index 8331a49..601cf0c 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java @@ -39,17 +39,12 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import org.apache.hadoop.hive.metastore.hbase.SharedStorageDescriptor; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.OutputFormat; -import org.apache.thrift.TException; -import org.apache.thrift.protocol.TBinaryProtocol; -import org.apache.thrift.transport.TMemoryBuffer; /** * A Hive Table Partition: is a fundamental storage unit within a Table. @@ -96,7 +91,7 @@ public Partition(Table tbl) throws HiveException { org.apache.hadoop.hive.metastore.api.Partition tPart = new org.apache.hadoop.hive.metastore.api.Partition(); if (!tbl.isView()) { - tPart.setSd(tbl.getTTable().getSd()); // TODO: get a copy + tPart.setSd(tbl.getTTable().getSd().deepCopy()); } initialize(tbl, tPart); } @@ -141,37 +136,13 @@ public Partition(Table tbl, Map partSpec, Path location) throws tpart.setValues(pvals); if (!tbl.isView()) { - tpart.setSd(cloneSd(tbl)); + tpart.setSd(tbl.getSd().deepCopy()); tpart.getSd().setLocation((location != null) ? location.toString() : null); } return tpart; } /** - * We already have methods that clone stuff using XML or Kryo. - * And now for something completely different - let's clone SD using Thrift! - * Refactored into a method. 
- */ - public static StorageDescriptor cloneSd(Table tbl) throws HiveException { - if (tbl.getSd() instanceof SharedStorageDescriptor) { - return new SharedStorageDescriptor((SharedStorageDescriptor)tbl.getSd()); - } - // What is the point of this? Why not just use the copy constructor in StorageDescriptor? - StorageDescriptor sd = new StorageDescriptor(); - try { - // replace with THRIFT-138 - TMemoryBuffer buffer = new TMemoryBuffer(1024); - TBinaryProtocol prot = new TBinaryProtocol(buffer); - tbl.getTTable().getSd().write(prot); - sd.read(prot); - } catch (TException e) { - LOG.error("Could not create a copy of StorageDescription"); - throw new HiveException("Could not create a copy of StorageDescription",e); - } - return sd; - } - - /** * Initializes this object with the given variables * * @param table diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java index fc9d0bd..5f70010 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java @@ -415,7 +415,7 @@ public PartitionWrapper(org.apache.hadoop.hive.metastore.api.Partition mapiPart, // location or an SD, but these are needed to create a ql.metadata.Partition, // so we use the table's SD. The only place this is used is by the // authorization hooks, so we will not affect code flow in the metastore itself. 
- wrapperApiPart.setSd(t.getSd()); + wrapperApiPart.setSd(t.getSd().deepCopy()); } initialize(new TableWrapper(t),wrapperApiPart); } diff --git ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java index c0fd4b3..5a271e5 100644 --- ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java +++ ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java @@ -257,7 +257,7 @@ private void addPartition(HiveMetaStoreClient client, Table table, part.setTableName(table.getTableName()); part.setValues(vals); part.setParameters(new HashMap()); - part.setSd(table.getSd()); + part.setSd(table.getSd().deepCopy()); part.getSd().setSerdeInfo(table.getSd().getSerdeInfo()); part.getSd().setLocation(table.getSd().getLocation() + location);