diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java index 3bcc5c0..20362e5 100644 --- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java +++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.avro.AvroObjectInspectorGenerator; -import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import org.apache.hadoop.hive.serde2.lazy.LazyFactory; import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector; @@ -215,16 +215,17 @@ public static void generateColumnTypes(Properties tbl, List colum // for avro type, the serialization class parameter is optional schemaLiteral = tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "." - + AvroSerdeUtils.SCHEMA_LITERAL); + + AvroTableProperties.SCHEMA_LITERAL.getPropName()); schemaUrl = tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "." 
- + AvroSerdeUtils.SCHEMA_URL); + + AvroTableProperties.SCHEMA_URL.getPropName()); if (schemaLiteral == null && schemaUrl == null) { // either schema literal, schema url or serialization class must // be provided throw new SerDeException("For an avro schema, either " - + AvroSerdeUtils.SCHEMA_LITERAL + ", " + AvroSerdeUtils.SCHEMA_URL + " or " + + AvroTableProperties.SCHEMA_LITERAL.getPropName() + ", " + + AvroTableProperties.SCHEMA_URL.getPropName() + " or " + serdeConstants.SERIALIZATION_CLASS + " property must be set."); } @@ -254,13 +255,13 @@ public static void generateColumnTypes(Properties tbl, List colum if (serType.equalsIgnoreCase(AVRO_SERIALIZATION_TYPE)) { // for avro type, the serialization class parameter is optional schemaLiteral = - tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_LITERAL); - schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_URL); + tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_LITERAL.getPropName()); + schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_URL.getPropName()); if (schemaLiteral == null && schemaUrl == null) { // either schema literal or serialization class must be provided throw new SerDeException("For an avro schema, either " - + AvroSerdeUtils.SCHEMA_LITERAL + " property or " + + AvroTableProperties.SCHEMA_LITERAL.getPropName() + " property or " + serdeConstants.SERIALIZATION_CLASS + " property must be set."); } @@ -315,16 +316,16 @@ public static void generateColumnTypes(Properties tbl, List colum // for avro type, the serialization class parameter is optional schemaLiteral = tbl.getProperty(colMap.familyName + "." + qualifierName + "." - + AvroSerdeUtils.SCHEMA_LITERAL); + + AvroTableProperties.SCHEMA_LITERAL.getPropName()); schemaUrl = tbl.getProperty(colMap.familyName + "." + qualifierName + "." 
- + AvroSerdeUtils.SCHEMA_URL); + + AvroTableProperties.SCHEMA_URL.getPropName()); if (schemaLiteral == null && schemaUrl == null) { // either schema literal, schema url or serialization class must // be provided throw new SerDeException("For an avro schema, either " - + AvroSerdeUtils.SCHEMA_LITERAL + ", " + AvroSerdeUtils.SCHEMA_URL + " or " + + AvroTableProperties.SCHEMA_LITERAL.getPropName() + ", " + AvroTableProperties.SCHEMA_URL.getPropName() + " or " + serdeConstants.SERIALIZATION_CLASS + " property must be set."); } diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java index 43c1f0c..a11d3cd 100644 --- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java +++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hive.hbase.struct.StructHBaseValueFactory; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; -import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.util.ReflectionUtils; @@ -302,20 +302,20 @@ private Schema getSchema(Configuration conf, Properties tbl, ColumnMapping colMa schemaLiteral = tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "." - + AvroSerdeUtils.SCHEMA_LITERAL); + + AvroTableProperties.SCHEMA_LITERAL.getPropName()); schemaUrl = tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "." - + AvroSerdeUtils.SCHEMA_URL); + + AvroTableProperties.SCHEMA_URL.getPropName()); } else { serType = tbl.getProperty(colMap.familyName + "." + HBaseSerDe.SERIALIZATION_TYPE); serClassName = tbl.getProperty(colMap.familyName + "." 
+ serdeConstants.SERIALIZATION_CLASS); - schemaLiteral = tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_LITERAL); + schemaLiteral = tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_LITERAL.getPropName()); - schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_URL); + schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_URL.getPropName()); } } else if (!colMap.hbaseRowKey) { // not an hbase row key. This should either be a prefix or an individual qualifier @@ -335,23 +335,23 @@ private Schema getSchema(Configuration conf, Properties tbl, ColumnMapping colMa schemaLiteral = tbl.getProperty(colMap.familyName + "." + qualifierName + "." - + AvroSerdeUtils.SCHEMA_LITERAL); + + AvroTableProperties.SCHEMA_LITERAL.getPropName()); schemaUrl = - tbl.getProperty(colMap.familyName + "." + qualifierName + "." + AvroSerdeUtils.SCHEMA_URL); + tbl.getProperty(colMap.familyName + "." + qualifierName + "." + AvroTableProperties.SCHEMA_URL.getPropName()); } if (serType == null) { throw new IllegalArgumentException("serialization.type property is missing"); } - String avroSchemaRetClass = tbl.getProperty(AvroSerdeUtils.SCHEMA_RETRIEVER); + String avroSchemaRetClass = tbl.getProperty(AvroTableProperties.SCHEMA_RETRIEVER.getPropName()); if (schemaLiteral == null && serClassName == null && schemaUrl == null && avroSchemaRetClass == null) { throw new IllegalArgumentException("serialization.type was set to [" + serType - + "] but neither " + AvroSerdeUtils.SCHEMA_LITERAL + ", " + AvroSerdeUtils.SCHEMA_URL - + ", serialization.class or " + AvroSerdeUtils.SCHEMA_RETRIEVER + " property was set"); + + "] but neither " + AvroTableProperties.SCHEMA_LITERAL.getPropName() + ", " + AvroTableProperties.SCHEMA_URL.getPropName() + + ", serialization.class or " + AvroTableProperties.SCHEMA_RETRIEVER.getPropName() + " property was set"); } Class deserializerClass = null; diff --git 
hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java index 3225e5c..514d5eb 100644 --- hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java +++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.serde2.avro.AvroLazyObjectInspector; import org.apache.hadoop.hive.serde2.avro.AvroSchemaRetriever; import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import org.apache.hadoop.hive.serde2.lazy.LazyFactory; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; @@ -57,7 +58,7 @@ public AvroHBaseValueFactory(int fieldID, Schema schema) { public void init(HBaseSerDeParameters hbaseParams, Configuration conf, Properties properties) throws SerDeException { super.init(hbaseParams, conf, properties); - String avroSchemaRetClass = properties.getProperty(AvroSerdeUtils.SCHEMA_RETRIEVER); + String avroSchemaRetClass = properties.getProperty(AvroTableProperties.SCHEMA_RETRIEVER.getPropName()); if (avroSchemaRetClass != null) { Class avroSchemaRetrieverClass = null; diff --git hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java index e28fc87..f244ed6 100644 --- hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java +++ hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java @@ -56,7 +56,7 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; -import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; +import 
org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; @@ -1077,7 +1077,7 @@ public void testHBaseSerDeWithAvroSchemaInline() throws SerDeException, IOExcept private Properties createPropertiesForHiveAvroSchemaInline() { Properties tbl = new Properties(); tbl.setProperty("cola.avro.serialization.type", "avro"); - tbl.setProperty("cola.avro." + AvroSerdeUtils.SCHEMA_LITERAL, RECORD_SCHEMA); + tbl.setProperty("cola.avro." + AvroTableProperties.SCHEMA_LITERAL.getPropName(), RECORD_SCHEMA); tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:avro"); tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true"); @@ -1123,7 +1123,7 @@ public void testHBaseSerDeWithForwardEvolvedSchema() throws SerDeException, IOEx private Properties createPropertiesForHiveAvroForwardEvolvedSchema() { Properties tbl = new Properties(); tbl.setProperty("cola.avro.serialization.type", "avro"); - tbl.setProperty("cola.avro." + AvroSerdeUtils.SCHEMA_LITERAL, RECORD_SCHEMA_EVOLVED); + tbl.setProperty("cola.avro." + AvroTableProperties.SCHEMA_LITERAL.getPropName(), RECORD_SCHEMA_EVOLVED); tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:avro"); tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true"); @@ -1168,7 +1168,7 @@ public void testHBaseSerDeWithBackwardEvolvedSchema() throws SerDeException, IOE private Properties createPropertiesForHiveAvroBackwardEvolvedSchema() { Properties tbl = new Properties(); tbl.setProperty("cola.avro.serialization.type", "avro"); - tbl.setProperty("cola.avro." + AvroSerdeUtils.SCHEMA_LITERAL, RECORD_SCHEMA); + tbl.setProperty("cola.avro." 
+ AvroTableProperties.SCHEMA_LITERAL.getPropName(), RECORD_SCHEMA); tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:avro"); tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true"); @@ -1283,7 +1283,7 @@ public void testHBaseSerDeWithAvroSchemaUrl() throws SerDeException, IOException private Properties createPropertiesForHiveAvroSchemaUrl(String schemaUrl) { Properties tbl = new Properties(); tbl.setProperty("cola.avro.serialization.type", "avro"); - tbl.setProperty("cola.avro." + AvroSerdeUtils.SCHEMA_URL, schemaUrl); + tbl.setProperty("cola.avro." + AvroTableProperties.SCHEMA_URL.getPropName(), schemaUrl); tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:avro"); tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true"); @@ -1333,7 +1333,7 @@ public void testHBaseSerDeWithAvroExternalSchema() throws SerDeException, IOExce private Properties createPropertiesForHiveAvroExternalSchema() { Properties tbl = new Properties(); tbl.setProperty("cola.avro.serialization.type", "avro"); - tbl.setProperty(AvroSerdeUtils.SCHEMA_RETRIEVER, + tbl.setProperty(AvroTableProperties.SCHEMA_RETRIEVER.getPropName(), "org.apache.hadoop.hive.hbase.HBaseTestAvroSchemaRetriever"); tbl.setProperty("cola.avro." 
+ serdeConstants.SERIALIZATION_CLASS, "org.apache.hadoop.hive.hbase.avro.Employee"); diff --git ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java index 8d58d74..89fac3f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; import org.apache.hadoop.hive.serde2.avro.AvroSerdeException; import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.JobConf; @@ -117,7 +118,7 @@ private Schema getSchema(JobConf job, FileSplit split) throws AvroSerdeException } Properties props = pathsAndParts.getValue().getProperties(); - if(props.containsKey(AvroSerdeUtils.SCHEMA_LITERAL) || props.containsKey(AvroSerdeUtils.SCHEMA_URL)) { + if(props.containsKey(AvroTableProperties.SCHEMA_LITERAL.getPropName()) || props.containsKey(AvroTableProperties.SCHEMA_URL.getPropName())) { return AvroSerdeUtils.determineSchemaOrThrowException(job, props); } else { @@ -133,7 +134,7 @@ private Schema getSchema(JobConf job, FileSplit split) throws AvroSerdeException // In "select * from table" situations (non-MR), we can add things to the job // It's safe to add this to the job since it's not *actually* a mapred job. // Here the global state is confined to just this process. 
- String s = job.get(AvroSerdeUtils.AVRO_SERDE_SCHEMA); + String s = job.get(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName()); if(s != null) { LOG.info("Found the avro schema in the job: " + s); return AvroSerdeUtils.getSchemaFor(s); diff --git serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java index 36dc484..008d9ec 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java +++ serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java @@ -21,20 +21,14 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.io.Writable; import org.junit.Test; -import org.mockito.Mockito; import java.util.List; import java.util.Properties; -import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AVRO_SERDE_SCHEMA; -import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_LITERAL; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class TestAvroSerde { @@ -70,10 +66,10 @@ public void initializeDoesNotReuseSchemasFromConf() throws SerDeException { // initialized. Therefore we need to make sure we don't look for any // old schemas within it. 
Configuration conf = new Configuration(); - conf.set(AVRO_SERDE_SCHEMA, originalSchema.toString(false)); + conf.set(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName(), originalSchema.toString(false)); Properties props = new Properties(); - props.put(SCHEMA_LITERAL, newSchemaString); + props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), newSchemaString); AvroSerDe asd = new AvroSerDe(); @@ -81,7 +77,7 @@ public void initializeDoesNotReuseSchemasFromConf() throws SerDeException { // Verify that the schema now within the configuration is the one passed // in via the properties - assertEquals(newSchema, AvroSerdeUtils.getSchemaFor(conf.get(AVRO_SERDE_SCHEMA))); + assertEquals(newSchema, AvroSerdeUtils.getSchemaFor(conf.get(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName()))); } @Test @@ -94,7 +90,7 @@ public void noSchemaProvidedThrowsException() { @Test public void gibberishSchemaProvidedReturnsErrorSchema() { Properties props = new Properties(); - props.put(AvroSerdeUtils.SCHEMA_LITERAL, "blahblahblah"); + props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), "blahblahblah"); verifyExpectedException(props); } @@ -102,7 +98,7 @@ public void gibberishSchemaProvidedReturnsErrorSchema() { @Test public void emptySchemaProvidedThrowsException() { Properties props = new Properties(); - props.put(AvroSerdeUtils.SCHEMA_LITERAL, ""); + props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), ""); verifyExpectedException(props); } @@ -110,7 +106,7 @@ public void emptySchemaProvidedThrowsException() { @Test public void badSchemaURLProvidedThrowsException() { Properties props = new Properties(); - props.put(AvroSerdeUtils.SCHEMA_URL, "not://a/url"); + props.put(AvroTableProperties.SCHEMA_URL.getPropName(), "not://a/url"); verifyExpectedException(props); } @@ -118,7 +114,7 @@ public void badSchemaURLProvidedThrowsException() { @Test public void emptySchemaURLProvidedThrowsException() { Properties props = new Properties(); - props.put(AvroSerdeUtils.SCHEMA_URL, 
""); + props.put(AvroTableProperties.SCHEMA_URL.getPropName(), ""); verifyExpectedException(props); } @@ -126,8 +122,8 @@ public void emptySchemaURLProvidedThrowsException() { @Test public void bothPropertiesSetToNoneThrowsException() { Properties props = new Properties(); - props.put(AvroSerdeUtils.SCHEMA_URL, AvroSerdeUtils.SCHEMA_NONE); - props.put(AvroSerdeUtils.SCHEMA_LITERAL, AvroSerdeUtils.SCHEMA_NONE); + props.put(AvroTableProperties.SCHEMA_URL.getPropName(), AvroSerdeUtils.SCHEMA_NONE); + props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), AvroSerdeUtils.SCHEMA_NONE); verifyExpectedException(props); } diff --git serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java index e07d06b..0013b78 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java +++ serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java @@ -25,14 +25,12 @@ import org.junit.Test; import java.io.IOException; -import java.net.MalformedURLException; import java.net.URISyntaxException; import java.util.Properties; import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.EXCEPTION_MESSAGE; -import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_LITERAL; import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_NONE; -import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_URL; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.determineSchemaOrThrowException; import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.getOtherTypeFromNullableType; import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.isNullableType; @@ -127,7 +125,7 @@ public void determineSchemaFindsLiterals() throws Exception { String schema = TestAvroObjectInspectorGenerator.RECORD_SCHEMA; Configuration conf = new 
Configuration(); Properties props = new Properties(); - props.put(AvroSerdeUtils.SCHEMA_LITERAL, schema); + props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), schema); Schema expected = AvroSerdeUtils.getSchemaFor(schema); assertEquals(expected, AvroSerdeUtils.determineSchemaOrThrowException(conf, props)); } @@ -136,7 +134,7 @@ public void determineSchemaFindsLiterals() throws Exception { public void detemineSchemaTriesToOpenUrl() throws AvroSerdeException, IOException { Configuration conf = new Configuration(); Properties props = new Properties(); - props.put(AvroSerdeUtils.SCHEMA_URL, "not:///a.real.url"); + props.put(AvroTableProperties.SCHEMA_URL.getPropName(), "not:///a.real.url"); try { AvroSerdeUtils.determineSchemaOrThrowException(conf, props); @@ -152,8 +150,8 @@ public void noneOptionWorksForSpecifyingSchemas() throws IOException, AvroSerdeE Properties props = new Properties(); // Combo 1: Both set to none - props.put(SCHEMA_URL, SCHEMA_NONE); - props.put(SCHEMA_LITERAL, SCHEMA_NONE); + props.put(AvroTableProperties.SCHEMA_URL.getPropName(), SCHEMA_NONE); + props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), SCHEMA_NONE); try { determineSchemaOrThrowException(conf, props); fail("Should have thrown exception with none set for both url and literal"); @@ -162,7 +160,7 @@ public void noneOptionWorksForSpecifyingSchemas() throws IOException, AvroSerdeE } // Combo 2: Literal set, url set to none - props.put(SCHEMA_LITERAL, TestAvroObjectInspectorGenerator.RECORD_SCHEMA); + props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), TestAvroObjectInspectorGenerator.RECORD_SCHEMA); Schema s; try { s = determineSchemaOrThrowException(conf, props); @@ -173,8 +171,8 @@ public void noneOptionWorksForSpecifyingSchemas() throws IOException, AvroSerdeE } // Combo 3: url set, literal set to none - props.put(SCHEMA_LITERAL, SCHEMA_NONE); - props.put(SCHEMA_URL, "not:///a.real.url"); + props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), 
SCHEMA_NONE); + props.put(AvroTableProperties.SCHEMA_URL.getPropName(), "not:///a.real.url"); try { determineSchemaOrThrowException(conf, props); fail("Should have tried to open that bogus URL");