diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/pom.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/pom.xml
index f62230f..1c0caaf 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/pom.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/pom.xml
@@ -33,6 +33,8 @@
     <yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>
+    <hbase.version>1.0.1</hbase.version>
+    <phoenix.version>4.5.0-SNAPSHOT</phoenix.version>
@@ -122,21 +124,70 @@
     <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-core</artifactId>
-      <version>4.3.0</version>
-      <exclusions>
-        <!-- Exclude jline from here -->
-        <exclusion>
-          <groupId>jline</groupId>
-          <artifactId>jline</artifactId>
-        </exclusion>
-      </exclusions>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-core</artifactId>
+      <version>${phoenix.version}</version>
+      <exclusions>
+        <!-- Exclude jline from here -->
+        <exclusion>
+          <groupId>jline</groupId>
+          <artifactId>jline</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-core</artifactId>
+      <type>test-jar</type>
+      <version>${phoenix.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-it</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-testing-util</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <optional>true</optional>
+      <exclusions>
+        <exclusion>
+          <groupId>org.jruby</groupId>
+          <artifactId>jruby-complete</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
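The test-scope dependencies added above (the hadoop-hdfs and phoenix-core test-jars, hbase-it, and hbase-testing-util) exist so that the unit test changed below can run against an in-process HBase/Phoenix mini-cluster instead of a live deployment. A minimal sketch of what hbase-testing-util provides, using the stock HBaseTestingUtility API from the pinned hbase.version; this snippet is illustrative only and not part of the patch:

    import org.apache.hadoop.hbase.HBaseTestingUtility;

    public class MiniHBaseClusterSketch {
      public static void main(String[] args) throws Exception {
        HBaseTestingUtility util = new HBaseTestingUtility();
        // Starts an in-process ZooKeeper, a mini HDFS, and an HBase
        // master plus region server, all inside this JVM.
        util.startMiniCluster();
        try {
          // A Phoenix JDBC URL would point at this cluster's ZK quorum.
          System.out.println("ZK client port: " + util.getConfiguration()
              .get("hbase.zookeeper.property.clientPort"));
        } finally {
          util.shutdownMiniCluster();
        }
      }
    }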
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixTimelineWriterImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixTimelineWriterImpl.java
index af8a233..0fce04d 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixTimelineWriterImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixTimelineWriterImpl.java
@@ -43,6 +43,7 @@
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
+import java.util.Properties;
import java.util.Set;

@Private
@@ -90,7 +91,10 @@
private static final String PHOENIX_STORAGE_SEPARATOR = ";";

/** Connection string to the deployed Phoenix cluster */
- static final String CONN_STRING = "jdbc:phoenix:localhost:2181:/hbase";
+ @VisibleForTesting
+ static String connString = "jdbc:phoenix:localhost:2181:/hbase";
+ @VisibleForTesting
+ static Properties connProperties = new Properties();

PhoenixTimelineWriterImpl() {
super((PhoenixTimelineWriterImpl.class.getName()));
@@ -178,7 +182,7 @@ static Connection getConnection() throws IOException {
Connection conn;
try {
Class.forName(DRIVER_CLASS_NAME);
- conn = DriverManager.getConnection(CONN_STRING);
+ conn = DriverManager.getConnection(connString, connProperties);
conn.setAutoCommit(false);
} catch (SQLException se) {
LOG.error("Failed to connect to phoenix server! "
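Replacing the final CONN_STRING constant with the package-private, @VisibleForTesting pair connString/connProperties lets a test repoint the writer at a mini-cluster before any connection is opened. A sketch of the intended override pattern, assuming code in the same package as the writer (the fields and getConnection() are package-private); the helper class here is hypothetical:

    import java.sql.Connection;
    import java.util.Properties;

    // Hypothetical helper; it would live in
    // org.apache.hadoop.yarn.server.timelineservice.storage to see the
    // package-private members.
    final class PhoenixConnectionOverrideSketch {
      static Connection connectForTest(String phoenixJdbcUrl) throws Exception {
        // Redirect the writer before serviceInit()/getConnection() run; in the
        // test below the URL comes from Phoenix BaseTest.getUrl().
        PhoenixTimelineWriterImpl.connString = phoenixJdbcUrl;
        PhoenixTimelineWriterImpl.connProperties = new Properties();
        return PhoenixTimelineWriterImpl.getConnection();
      }
    }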
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixTimelineWriterImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixTimelineWriterImpl.java
index a55893e..3880e80 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixTimelineWriterImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixTimelineWriterImpl.java
@@ -23,30 +23,37 @@
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
import org.junit.Test;
+import org.apache.phoenix.hbase.index.write.IndexWriterUtils;
+import org.apache.phoenix.query.BaseTest;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
-public class TestPhoenixTimelineWriterImpl {
- private PhoenixTimelineWriterImpl writer;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
- @Before
- public void setup() throws Exception {
- // TODO: launch a miniphoenix cluster, or else we're directly operating on
- // the active Phoenix cluster
+public class TestPhoenixTimelineWriterImpl extends BaseTest {
+ private static PhoenixTimelineWriterImpl writer;
+ private static final int BATCH_SIZE = 3;
+
+ @BeforeClass
+ public static void setup() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
- writer = createPhoenixWriter(conf);
+ writer = setupPhoenixClusterAndWriterForTest(conf);
}
- @Ignore
- @Test
+ @Test(timeout = 90000)
public void testPhoenixWriterBasic() throws Exception {
// Set up a list of timeline entities and write them back to Phoenix
int numEntity = 12;
@@ -91,25 +98,41 @@ public void testPhoenixWriterBasic() throws Exception {
verifySQLWithCount(sql, (numEntity / 4), "Number of events should be ");
}
- @After
- public void cleanup() throws Exception {
- // Note: it is assumed that we're working on a test only cluster, or else
- // this cleanup process will drop the entity table.
+ @AfterClass
+ public static void cleanup() throws Exception {
writer.dropTable(PhoenixTimelineWriterImpl.ENTITY_TABLE_NAME);
writer.dropTable(PhoenixTimelineWriterImpl.EVENT_TABLE_NAME);
writer.dropTable(PhoenixTimelineWriterImpl.METRIC_TABLE_NAME);
writer.serviceStop();
}
- private static PhoenixTimelineWriterImpl createPhoenixWriter(
+ private static PhoenixTimelineWriterImpl setupPhoenixClusterAndWriterForTest(
YarnConfiguration conf) throws Exception{
+ Map<String, String> props = new HashMap<>();
+ // Must update config before starting server
+ props.put(QueryServices.STATS_USE_CURRENT_TIME_ATTRIB,
+ Boolean.FALSE.toString());
+ props.put("java.security.krb5.realm", "");
+ props.put("java.security.krb5.kdc", "");
+ props.put(IntegrationTestingUtility.IS_DISTRIBUTED_CLUSTER,
+     Boolean.FALSE.toString());
+ props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(5000));
+ props.put(IndexWriterUtils.HTABLE_THREAD_KEY, Integer.toString(100));
+ // Make a small batch size to test multiple calls to reserve sequences
+ props.put(QueryServices.SEQUENCE_CACHE_SIZE_ATTRIB,
+ Long.toString(BATCH_SIZE));
+ // Now start the mini cluster and test driver with the updated config
+ setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+
PhoenixTimelineWriterImpl myWriter = new PhoenixTimelineWriterImpl();
+ // Change connection settings for test
+ PhoenixTimelineWriterImpl.connString = getUrl();
+ PhoenixTimelineWriterImpl.connProperties =
+     PropertiesUtil.deepCopy(TEST_PROPERTIES);
myWriter.serviceInit(conf);
return myWriter;
}
private void verifySQLWithCount(String sql, int targetCount, String message)
- throws Exception{
+ throws Exception {
try (
Statement stmt =
PhoenixTimelineWriterImpl.getConnection().createStatement();