diff --git common/pom.xml common/pom.xml
index ad9f6c0..01c74ba 100644
--- common/pom.xml
+++ common/pom.xml
@@ -72,6 +72,12 @@
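+    <!-- tempus-fugit supplies ConcurrentRule/RepeatingRule and the @Concurrent/@Repeating
+         annotations used to run these unit tests from multiple threads; the
+         tempus-fugit.version property is assumed to be defined in the parent pom. -->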
     <dependency>
+      <groupId>com.google.code.tempus-fugit</groupId>
+      <artifactId>tempus-fugit</artifactId>
+      <version>${tempus-fugit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java
index 63f0394..f3b4729 100644
--- common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java
+++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java
@@ -18,10 +18,19 @@
package org.apache.hadoop.hive.common.type;
-import junit.framework.TestCase;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
-public class TestHiveChar extends TestCase {
+public class TestHiveChar {
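+ // ConcurrentRule honors @Concurrent(count=N) by invoking the annotated test from N
+ // threads at once, and RepeatingRule honors @Repeating(repetition=M) by running it M
+ // times, so each test below doubles as a basic thread-safety check.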
+ @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+ @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testBasic() {
HiveChar hc = new HiveChar("abc", 10);
assertEquals("abc ", hc.toString());
@@ -47,6 +56,9 @@ public void testBasic() {
assertEquals(3, hc.getCharacterLength());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testStringLength() {
HiveChar hc = new HiveChar();
@@ -60,6 +72,9 @@ public void testStringLength() {
assertEquals("0123456789 ", hc.toString());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testComparison() {
HiveChar hc1 = new HiveChar();
HiveChar hc2 = new HiveChar();
diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
index 46a73f2..959989a 100644
--- common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
+++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
@@ -20,12 +20,19 @@
import java.math.BigDecimal;
import java.math.BigInteger;
-import org.junit.Assert;
-import org.junit.Test;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
public class TestHiveDecimal {
+ @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+ @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testPrecisionScaleEnforcement() {
String decStr = "1786135888657847525803324040144343378.09799306448796128931113691624";
HiveDecimal dec = HiveDecimal.create(decStr);
@@ -82,6 +89,8 @@ public void testPrecisionScaleEnforcement() {
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testMultiply() {
HiveDecimal dec1 = HiveDecimal.create("0.00001786135888657847525803");
HiveDecimal dec2 = HiveDecimal.create("3.0000123456789");
@@ -105,6 +114,8 @@ public void testMultiply() {
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testPow() {
HiveDecimal dec = HiveDecimal.create("3.00001415926");
Assert.assertEquals(dec.pow(2), dec.multiply(dec));
@@ -118,6 +129,8 @@ public void testPow() {
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testDivide() {
HiveDecimal dec1 = HiveDecimal.create("3.14");
HiveDecimal dec2 = HiveDecimal.create("3");
@@ -133,6 +146,8 @@ public void testDivide() {
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testPlus() {
HiveDecimal dec1 = HiveDecimal.create("99999999999999999999999999999999999");
HiveDecimal dec2 = HiveDecimal.create("1");
@@ -145,6 +160,8 @@ public void testPlus() {
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testSubtract() {
HiveDecimal dec1 = HiveDecimal.create("3.140");
HiveDecimal dec2 = HiveDecimal.create("1.00");
@@ -152,6 +169,8 @@ public void testSubtract() {
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testPosMod() {
HiveDecimal hd1 = HiveDecimal.create("-100.91");
HiveDecimal hd2 = HiveDecimal.create("9.8");
@@ -160,12 +179,16 @@ public void testPosMod() {
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testHashCode() {
Assert.assertEquals(HiveDecimal.create("9").hashCode(), HiveDecimal.create("9.00").hashCode());
Assert.assertEquals(HiveDecimal.create("0").hashCode(), HiveDecimal.create("0.00").hashCode());
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testException() {
HiveDecimal dec = HiveDecimal.create("3.1415.926");
Assert.assertNull(dec);
@@ -174,6 +197,8 @@ public void testException() {
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testBinaryConversion() {
testBinaryConversion("0.00");
testBinaryConversion("-12.25");
diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
index f8da48d..309d042 100644
--- common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
+++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.common.type;
-import junit.framework.TestCase;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.common.LogUtils;
@@ -28,8 +27,15 @@
import java.io.InputStreamReader;
import java.util.Random;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
+
+public class TestHiveVarchar {
+ @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+ @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-public class TestHiveVarchar extends TestCase {
public TestHiveVarchar() {
super();
}
@@ -65,6 +71,9 @@ public static int getRandomCodePoint(int excludeChar) {
}
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testStringLength() throws Exception {
int strLen = 20;
int[] lengths = { 15, 20, 25 };
@@ -124,6 +133,9 @@ public void testStringLength() throws Exception {
assertEquals(5, vc1.getCharacterLength());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testComparison() throws Exception {
HiveVarchar hc1 = new HiveVarchar("abcd", 20);
HiveVarchar hc2 = new HiveVarchar("abcd", 20);
diff --git serde/pom.xml serde/pom.xml
index 9f327f0..b7bc4f0 100644
--- serde/pom.xml
+++ serde/pom.xml
@@ -78,6 +78,12 @@
     <dependency>
+      <groupId>com.google.code.tempus-fugit</groupId>
+      <artifactId>tempus-fugit</artifactId>
+      <version>${tempus-fugit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
diff --git serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
new file mode 100644
index 0000000..75de0a6
--- /dev/null
+++ serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
@@ -0,0 +1,138 @@
+package org.apache.hadoop.hive.serde2.io;
+
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+
+import static org.junit.Assert.*;
+import java.io.*;
+import java.sql.Date;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+
+public class TestDateWritable {
+
+ @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+ @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
+ public void testConstructor() {
+ Date date = Date.valueOf(getRandomDateString());
+ DateWritable dw1 = new DateWritable(date);
+ DateWritable dw2 = new DateWritable(dw1);
+ DateWritable dw3 = new DateWritable(dw1.getDays());
+
+ assertEquals(dw1, dw1);
+ assertEquals(dw1, dw2);
+ assertEquals(dw2, dw3);
+ assertEquals(date, dw1.get());
+ assertEquals(date, dw2.get());
+ assertEquals(date, dw3.get());
+ }
+
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
+ public void testComparison() {
+ // Get 2 different dates
+ Date date1 = Date.valueOf(getRandomDateString());
+ Date date2 = Date.valueOf(getRandomDateString());
+ while (date1.equals(date2)) {
+ date2 = Date.valueOf(getRandomDateString());
+ }
+
+ DateWritable dw1 = new DateWritable(date1);
+ DateWritable dw2 = new DateWritable(date2);
+ DateWritable dw3 = new DateWritable(date1);
+
+ assertTrue("Dates should be equal", dw1.equals(dw1));
+ assertTrue("Dates should be equal", dw1.equals(dw3));
+ assertTrue("Dates should be equal", dw3.equals(dw1));
+ assertEquals("Dates should be equal", 0, dw1.compareTo(dw1));
+ assertEquals("Dates should be equal", 0, dw1.compareTo(dw3));
+ assertEquals("Dates should be equal", 0, dw3.compareTo(dw1));
+
+ assertFalse("Dates not should be equal", dw1.equals(dw2));
+ assertFalse("Dates not should be equal", dw2.equals(dw1));
+ assertTrue("Dates not should be equal", 0 != dw1.compareTo(dw2));
+ assertTrue("Dates not should be equal", 0 != dw2.compareTo(dw1));
+ }
+
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
+ public void testGettersSetters() {
+ Date date1 = Date.valueOf(getRandomDateString());
+ Date date2 = Date.valueOf(getRandomDateString());
+ Date date3 = Date.valueOf(getRandomDateString());
+ DateWritable dw1 = new DateWritable(date1);
+ DateWritable dw2 = new DateWritable(date2);
+ DateWritable dw3 = new DateWritable(date3);
+ DateWritable dw4 = new DateWritable();
+
+ // Getters
+ assertEquals(date1, dw1.get());
+ assertEquals(date1.getTime() / 1000, dw1.getTimeInSeconds());
+
+ dw4.set(Date.valueOf("1970-01-02"));
+ assertEquals(1, dw4.getDays());
+ dw4.set(Date.valueOf("1971-01-01"));
+ assertEquals(365, dw4.getDays());
+
+ // Setters
+ dw4.set(dw1.getDays());
+ assertEquals(dw1, dw4);
+
+ dw4.set(dw2.get());
+ assertEquals(dw2, dw4);
+
+ dw4.set(dw3);
+ assertEquals(dw3, dw4);
+ }
+
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
+ public void testWritableMethods() throws Throwable {
+ DateWritable dw1 = new DateWritable(Date.valueOf(getRandomDateString()));
+ DateWritable dw2 = new DateWritable();
+ ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+ DataOutput out = new DataOutputStream(byteStream);
+
+ dw1.write(out);
+ dw2.readFields(new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray())));
+
+ assertEquals("Dates should be equal", dw1, dw2);
+ }
+
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
+ public void testDateValueOf() {
+ // Just making sure Date.valueOf() works ok
+ String dateStr = getRandomDateString();
+ Date date = Date.valueOf(dateStr);
+ assertEquals(dateStr, date.toString());
+ }
+
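+ // dateStrings is filled once in the @BeforeClass method below and only read afterwards,
+ // so the @Concurrent tests above can share it without extra synchronization.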
+ private static String[] dateStrings = new String[365];
+
+ @BeforeClass
+ public static void setupDateStrings() {
+ DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
+ Date initialDate = Date.valueOf("2014-01-01");
+ Calendar cal = Calendar.getInstance();
+ cal.setTime(initialDate);
+ for (int idx = 0; idx < 365; ++idx) {
+ dateStrings[idx] = format.format(cal.getTime());
+ cal.add(Calendar.DAY_OF_YEAR, 1);
+ }
+ }
+
+ private static String getRandomDateString() {
+ return dateStrings[(int) (Math.random() * 365)];
+ }
+}
diff --git serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java
index c8bb311..ee4292d 100644
--- serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java
+++ serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java
@@ -18,10 +18,20 @@
package org.apache.hadoop.hive.serde2.io;
-import junit.framework.TestCase;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+
+import static org.junit.Assert.*;
import org.apache.hadoop.hive.common.type.HiveChar;
-public class TestHiveCharWritable extends TestCase {
+public class TestHiveCharWritable {
+ @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+ @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testConstructor() throws Exception {
HiveCharWritable hcw1 = new HiveCharWritable(new HiveChar("abc", 5));
assertEquals("abc ", hcw1.toString());
@@ -30,6 +40,9 @@ public void testConstructor() throws Exception {
assertEquals("abc ", hcw2.toString());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testSet() throws Exception {
HiveCharWritable hcw1 = new HiveCharWritable();
@@ -70,18 +83,27 @@ public void testSet() throws Exception {
assertEquals("ab", hcw1.getTextValue().toString());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testGetHiveChar() throws Exception {
HiveCharWritable hcw = new HiveCharWritable();
hcw.set("abcd", 10);
assertEquals("abcd ", hcw.getHiveChar().toString());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testGetCharacterLength() throws Exception {
HiveCharWritable hcw = new HiveCharWritable();
hcw.set("abcd", 10);
assertEquals(4, hcw.getCharacterLength());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testEnforceMaxLength() {
HiveCharWritable hcw1 = new HiveCharWritable();
hcw1.set("abcdefghij", 10);
@@ -92,6 +114,9 @@ public void testEnforceMaxLength() {
assertEquals("abcde", hcw1.toString());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testComparison() throws Exception {
HiveCharWritable hcw1 = new HiveCharWritable();
HiveCharWritable hcw2 = new HiveCharWritable();
diff --git serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
index 849646b..3b12514 100644
--- serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
+++ serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
@@ -18,7 +18,10 @@
package org.apache.hadoop.hive.serde2.io;
-import junit.framework.Assert;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
import java.math.BigDecimal;
import java.math.BigInteger;
@@ -29,8 +32,6 @@
import org.apache.hadoop.hive.common.type.Decimal128;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hive.common.util.Decimal128FastBuffer;
-import org.junit.Before;
-import org.junit.Test;
/**
* Unit tests for testing the fast allocation-free conversion
@@ -38,14 +39,15 @@
*/
public class TestHiveDecimalWritable {
- private Decimal128FastBuffer scratch;
+ @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+ @Rule public RepeatingRule repeatingRule = new RepeatingRule();
@Before
public void setUp() throws Exception {
- scratch = new Decimal128FastBuffer();
}
private void doTestFastStreamForHiveDecimal(String valueString) {
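+ // The Decimal128FastBuffer scratch buffer used to be a shared field; with @Concurrent
+ // tests each invocation now allocates its own so runs cannot interfere.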
+ Decimal128FastBuffer scratch = new Decimal128FastBuffer();
BigDecimal value = new BigDecimal(valueString);
Decimal128 dec = new Decimal128();
dec.update(value);
@@ -61,21 +63,23 @@ private void doTestFastStreamForHiveDecimal(String valueString) {
BigDecimal readValue = hd.bigDecimalValue();
- Assert.assertEquals(value, readValue);
+ assertEquals(value, readValue);
// Now test fastUpdate from the same serialized HiveDecimal
Decimal128 decRead = new Decimal128().fastUpdateFromInternalStorage(
witness.getInternalStorage(), (short) witness.getScale());
- Assert.assertEquals(dec, decRead);
+ assertEquals(dec, decRead);
// Test fastUpdate from its own (not fully compacted) serialized output
Decimal128 decReadSelf = new Decimal128().fastUpdateFromInternalStorage(
hdw.getInternalStorage(), (short) hdw.getScale());
- Assert.assertEquals(dec, decReadSelf);
+ assertEquals(dec, decReadSelf);
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testFastStreamForHiveDecimal() {
doTestFastStreamForHiveDecimal("0");
@@ -217,7 +221,10 @@ void doTestDecimalWithBoundsCheck(Decimal128 value) {
}
@Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testHive6594() {
+ Decimal128FastBuffer scratch = new Decimal128FastBuffer();
String[] vs = new String[] {
"-4033.445769230769",
"6984454.211097692"};
@@ -236,7 +243,7 @@ public void testHive6594() {
BigDecimal readValue = hd.bigDecimalValue();
- Assert.assertEquals(d.toBigDecimal().stripTrailingZeros(),
+ assertEquals(d.toBigDecimal().stripTrailingZeros(),
readValue.stripTrailingZeros());
}
}
diff --git serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java
index 74e7228..32d0da7 100644
--- serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java
+++ serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java
@@ -17,11 +17,21 @@
*/
package org.apache.hadoop.hive.serde2.io;
-import junit.framework.TestCase;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
+
import org.apache.hadoop.hive.common.type.HiveVarchar;
import java.io.*;
-public class TestHiveVarcharWritable extends TestCase {
+public class TestHiveVarcharWritable {
+ @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+ @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testStringLength() throws Exception {
HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
assertEquals(10, vc1.getCharacterLength());
@@ -54,6 +64,9 @@ public void testStringLength() throws Exception {
assertEquals(6, vc1.getCharacterLength());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testEnforceLength() throws Exception {
HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
assertEquals(10, vc1.getCharacterLength());
@@ -66,8 +79,11 @@ public void testEnforceLength() throws Exception {
vc1.enforceMaxLength(8);
assertEquals(8, vc1.getCharacterLength());
-}
+ }
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testComparison() throws Exception {
HiveVarcharWritable hc1 = new HiveVarcharWritable(new HiveVarchar("abcd", 20));
HiveVarcharWritable hc2 = new HiveVarcharWritable(new HiveVarchar("abcd", 20));
@@ -101,6 +117,9 @@ public void testComparison() throws Exception {
assertFalse(0 == hc2.compareTo(hc1));
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testStringValue() throws Exception {
HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("abcde", 20));
assertEquals("abcde", vc1.toString());
diff --git serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
index 0e7b418..5fee019 100644
--- serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
+++ serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.hive.serde2.io;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
@@ -32,15 +35,25 @@
import java.util.Random;
import java.util.TimeZone;
-import junit.framework.TestCase;
+import org.junit.*;
+import static org.junit.Assert.*;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
-public class TestTimestampWritable extends TestCase {
+public class TestTimestampWritable {
+
+ @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+ @Rule public RepeatingRule repeatingRule = new RepeatingRule();
- private static DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
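+ // SimpleDateFormat is not thread-safe, so under @Concurrent each thread gets its own
+ // formatter via ThreadLocal rather than the former shared static instance.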
+ private static ThreadLocal<DateFormat> DATE_FORMAT =
+     new ThreadLocal<DateFormat>() {
+       @Override
+       protected synchronized DateFormat initialValue() {
+         return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+       }
+     };
private static final int HAS_DECIMAL_MASK = 0x80000000;
@@ -64,14 +77,14 @@ private static long getSeconds(Timestamp ts) {
private static long parseToMillis(String s) {
try {
- return DATE_FORMAT.parse(s).getTime();
+ return DATE_FORMAT.get().parse(s).getTime();
} catch (ParseException ex) {
throw new RuntimeException(ex);
}
}
- @Override
- protected void setUp() {
+ @Before
+ public void setUp() {
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
}
@@ -252,6 +265,9 @@ private static TimestampWritable fromIntAndVInts(int i, long... vints) throws IO
return tsw;
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testReverseNanos() {
assertEquals(0, reverseNanos(0));
assertEquals(120000000, reverseNanos(21));
@@ -265,6 +281,8 @@ public void testReverseNanos() {
* Test serializing and deserializing timestamps that can be represented by a number of seconds
* from 0 to 2147483647 since the UNIX epoch.
*/
+ @Test
+ @Concurrent(count=4)
public void testTimestampsWithinPositiveIntRange() throws IOException {
Random rand = new Random(294722773L);
for (int i = 0; i < 10000; ++i) {
@@ -281,6 +299,8 @@ private static long randomMillis(long minMillis, long maxMillis, Random rand) {
* Test timestamps that don't necessarily fit between 1970 and 2038. This depends on HIVE-4525
* being fixed.
*/
+ @Test
+ @Concurrent(count=4)
public void testTimestampsOutsidePositiveIntRange() throws IOException {
Random rand = new Random(789149717L);
for (int i = 0; i < 10000; ++i) {
@@ -289,6 +309,8 @@ public void testTimestampsOutsidePositiveIntRange() throws IOException {
}
}
+ @Test
+ @Concurrent(count=4)
public void testTimestampsInFullRange() throws IOException {
Random rand = new Random(2904974913L);
for (int i = 0; i < 10000; ++i) {
@@ -296,6 +318,8 @@ public void testTimestampsInFullRange() throws IOException {
}
}
+ @Test
+ @Concurrent(count=4)
public void testToFromDouble() {
Random rand = new Random(294729777L);
for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) {
@@ -326,6 +350,8 @@ private static HiveDecimal timestampToDecimal(Timestamp ts) {
return HiveDecimal.create(d);
}
+ @Test
+ @Concurrent(count=4)
public void testDecimalToTimestampRandomly() {
Random rand = new Random(294729777L);
for (int i = 0; i < 10000; ++i) {
@@ -336,6 +362,9 @@ public void testDecimalToTimestampRandomly() {
}
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testDecimalToTimestampCornerCases() {
Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33"));
assertEquals(0, ts.getTime() % 1000);
@@ -347,6 +376,9 @@ public void testDecimalToTimestampCornerCases() {
}
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testSerializationFormatDirectly() throws IOException {
assertEquals("1970-01-01 00:00:00", fromIntAndVInts(0).toString());
assertEquals("1970-01-01 00:00:01", fromIntAndVInts(1).toString());
@@ -374,6 +406,9 @@ public void testSerializationFormatDirectly() throws IOException {
-3210 - 1, seconds >> 31).toString());
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testMaxSize() {
// This many bytes are necessary to store the reversed nanoseconds.
assertEquals(5, WritableUtils.getVIntSize(999999999));
@@ -396,6 +431,9 @@ public void testMaxSize() {
// Therefore, the maximum total size of a serialized timestamp is 4 + 5 + 4 = 13.
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testMillisToSeconds() {
assertEquals(0, TimestampWritable.millisToSeconds(0));
assertEquals(-1, TimestampWritable.millisToSeconds(-1));
@@ -427,6 +465,9 @@ private static int normalizeComparisonResult(int result) {
return result < 0 ? -1 : (result > 0 ? 1 : 0);
}
+ @Test
+ @Concurrent(count=4)
+ @Repeating(repetition=100)
public void testBinarySortable() {
Random rand = new Random(5972977L);
List<TimestampWritable> tswList = new ArrayList<TimestampWritable>();